[ 464.019416] env[61273]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61273) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 464.019775] env[61273]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61273) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 464.019821] env[61273]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61273) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 464.020189] env[61273]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 464.107970] env[61273]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61273) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 464.117166] env[61273]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=61273) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 464.159256] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-af9710c5-d19b-4dcf-af00-ee235f0ef363 None None] Creating reply queue: reply_1a10a9f2080b4c739845c67ef4f4f9e6
[ 464.167518] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-af9710c5-d19b-4dcf-af00-ee235f0ef363 None None] Expecting reply to msg 29ac3ca640924aa6954b23ce9fb195c4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6
[ 464.181222] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29ac3ca640924aa6954b23ce9fb195c4
[ 464.721927] env[61273]: INFO nova.virt.driver [None req-af9710c5-d19b-4dcf-af00-ee235f0ef363 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 464.793494] env[61273]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 464.793732] env[61273]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 464.793797] env[61273]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61273) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 467.732609] env[61273]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-0e9ad63f-dd3d-469f-85e1-cd79b7174542 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.751784] env[61273]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61273) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 467.751871] env[61273]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-497baad7-daa9-4f77-b0fb-117136e88c13 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.785565] env[61273]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 68773.
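Editor's note: the entries above show the VMware driver logging into vCenter through oslo.vmware (suds client creation, SessionManager.Login, new session ID). Below is a minimal sketch of how such a session is opened; this is not nova's driver code, the credentials and retry/poll values are placeholders, and the positional parameter order of VMwareAPISession is assumed from oslo.vmware and may differ by release.

from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc1.osci.c.eu-de-1.cloud.sap',  # vCenter host taken from the log
    'administrator@vsphere.local',   # placeholder username
    'secret',                        # placeholder password
    10,                              # api_retry_count (assumed value)
    0.5)                             # task_poll_interval in seconds (assumed value)

# Subsequent SOAP calls (RetrieveServiceContent, PropertyCollector.*, Login)
# go through this session object; they are what produce the
# "Invoking ..." DEBUG entries logged by oslo_vmware.service above.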
[ 467.785700] env[61273]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.992s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 467.786296] env[61273]: INFO nova.virt.vmwareapi.driver [None req-af9710c5-d19b-4dcf-af00-ee235f0ef363 None None] VMware vCenter version: 7.0.3
[ 467.789704] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5920a683-a417-4f2b-9a58-ba230de84653 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.807947] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f739ae9-b1f2-43fc-94e6-ce8f730762b7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.814744] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0388c9fc-2485-48c8-a487-4bb5379fb270 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.823832] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230f67eb-77c3-453e-a2ea-1bf042293f96 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.836123] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5e7281-9ad8-4583-80a7-8aefa3fa52bc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.842699] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544ef779-e9bd-4a4f-af9e-934c7331edc4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.873773] env[61273]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-93dd9966-3bcc-4190-a736-63f99de5c075 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.879367] env[61273]: DEBUG nova.virt.vmwareapi.driver [None req-af9710c5-d19b-4dcf-af00-ee235f0ef363 None None] Extension org.openstack.compute already exists. {{(pid=61273) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 467.882272] env[61273]: INFO nova.compute.provider_config [None req-af9710c5-d19b-4dcf-af00-ee235f0ef363 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
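Editor's note: the "Acquiring lock" / "acquired ... waited" / "released ... held 2.992s" lines around _create_session come from oslo.concurrency's synchronization wrapper. A hedged illustration of that pattern follows; the lock name is taken from the log, the function body is a stand-in rather than nova's implementation.

from oslo_concurrency import lockutils

@lockutils.synchronized('oslo_vmware_api_lock')
def create_session():
    """Stand-in body: only one caller at a time may (re)build the vCenter session."""

# Calling the decorated function emits the same "Acquiring lock ... / acquired ...
# / released ... held Ns" DEBUG lines when debug logging is enabled.
create_session()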
[ 467.882982] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-af9710c5-d19b-4dcf-af00-ee235f0ef363 None None] Expecting reply to msg cb2934f4e23b49438ca450268aa3cc1c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6
[ 467.899431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb2934f4e23b49438ca450268aa3cc1c
[ 468.386425] env[61273]: DEBUG nova.context [None req-af9710c5-d19b-4dcf-af00-ee235f0ef363 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),84364d98-e6ce-480f-8cca-682929b0326f(cell1) {{(pid=61273) load_cells /opt/stack/nova/nova/context.py:464}}
[ 468.388527] env[61273]: DEBUG oslo_concurrency.lockutils [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 468.388748] env[61273]: DEBUG oslo_concurrency.lockutils [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 468.389436] env[61273]: DEBUG oslo_concurrency.lockutils [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 468.389865] env[61273]: DEBUG oslo_concurrency.lockutils [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Acquiring lock "84364d98-e6ce-480f-8cca-682929b0326f" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 468.390057] env[61273]: DEBUG oslo_concurrency.lockutils [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Lock "84364d98-e6ce-480f-8cca-682929b0326f" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 468.391076] env[61273]: DEBUG oslo_concurrency.lockutils [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Lock "84364d98-e6ce-480f-8cca-682929b0326f" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 468.411624] env[61273]: INFO dbcounter [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Registered counter for database nova_cell0
[ 468.419589] env[61273]: INFO dbcounter [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Registered counter for database nova_cell1
[ 468.422869] env[61273]: DEBUG oslo_db.sqlalchemy.engines [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61273) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 468.423471] env[61273]: DEBUG oslo_db.sqlalchemy.engines [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61273) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 468.427906] env[61273]: ERROR nova.db.main.api [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 468.427906] env[61273]: result = function(*args, **kwargs)
[ 468.427906] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 468.427906] env[61273]: return func(*args, **kwargs)
[ 468.427906] env[61273]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 468.427906] env[61273]: result = fn(*args, **kwargs)
[ 468.427906] env[61273]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 468.427906] env[61273]: return f(*args, **kwargs)
[ 468.427906] env[61273]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 468.427906] env[61273]: return db.service_get_minimum_version(context, binaries)
[ 468.427906] env[61273]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 468.427906] env[61273]: _check_db_access()
[ 468.427906] env[61273]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 468.427906] env[61273]: stacktrace = ''.join(traceback.format_stack())
[ 468.427906] env[61273]:
[ 468.429020] env[61273]: ERROR nova.db.main.api [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 468.429020] env[61273]: result = function(*args, **kwargs)
[ 468.429020] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 468.429020] env[61273]: return func(*args, **kwargs)
[ 468.429020] env[61273]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 468.429020] env[61273]: result = fn(*args, **kwargs)
[ 468.429020] env[61273]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 468.429020] env[61273]: return f(*args, **kwargs)
[ 468.429020] env[61273]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 468.429020] env[61273]: return db.service_get_minimum_version(context, binaries)
[ 468.429020] env[61273]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 468.429020] env[61273]: _check_db_access()
[ 468.429020] env[61273]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 468.429020] env[61273]: stacktrace = ''.join(traceback.format_stack())
[ 468.429020] env[61273]:
[ 468.429646] env[61273]: WARNING nova.objects.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 468.429646] env[61273]: WARNING nova.objects.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Failed to get minimum service version for cell 84364d98-e6ce-480f-8cca-682929b0326f
[ 468.430039] env[61273]: DEBUG oslo_concurrency.lockutils [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Acquiring lock "singleton_lock" {{(pid=61273) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 468.430204] env[61273]: DEBUG oslo_concurrency.lockutils [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Acquired lock "singleton_lock" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 468.430445] env[61273]: DEBUG oslo_concurrency.lockutils [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Releasing lock "singleton_lock" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 468.430767] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Full set of CONF: {{(pid=61273) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 468.430913] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ******************************************************************************** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 468.431042] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] Configuration options gathered from: {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 468.431179] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 468.431369] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 468.431517] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ================================================================================ {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 468.431735] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] allow_resize_to_same_host = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.431906] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] arq_binding_timeout = 300 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.432050] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] backdoor_port = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.432183] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] backdoor_socket = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.432355] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] block_device_allocate_retries = 60 {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.432518] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] block_device_allocate_retries_interval = 3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.432706] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cert = self.pem {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.432873] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.433039] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute_monitors = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.433201] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] config_dir = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.433368] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] config_drive_format = iso9660 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.433505] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.433673] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] config_source = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.433840] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] console_host = devstack {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.434001] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] control_exchange = nova {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.434156] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cpu_allocation_ratio = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.434312] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] daemon = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.434474] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] debug = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.434629] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] default_access_ip_network_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.434795] 
env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] default_availability_zone = nova {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.434948] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] default_ephemeral_format = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.435103] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] default_green_pool_size = 1000 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.435347] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.435512] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] default_schedule_zone = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.435670] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] disk_allocation_ratio = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.435828] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] enable_new_services = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.436012] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] enabled_apis = ['osapi_compute'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.436204] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] enabled_ssl_apis = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.436367] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] flat_injected = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.436527] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] force_config_drive = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.436688] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] force_raw_images = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.436859] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 
None None] graceful_shutdown_timeout = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.437018] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] heal_instance_info_cache_interval = 60 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.437224] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] host = cpu-1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.437394] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.437556] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.437718] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.437927] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.438089] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] instance_build_timeout = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.438251] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] instance_delete_interval = 300 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.438419] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] instance_format = [instance: %(uuid)s] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.438583] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] instance_name_template = instance-%08x {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.438743] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] instance_usage_audit = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.438909] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] instance_usage_audit_period = month {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.439073] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.439237] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.439402] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] internal_service_availability_zone = internal {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.439561] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] key = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.439723] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] live_migration_retry_count = 30 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.439884] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] log_color = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.440061] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] log_config_append = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.440234] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.440395] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] log_dir = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.440553] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] log_file = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.440682] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] log_options = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.440840] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] log_rotate_interval = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.441008] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] log_rotate_interval_type = days {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.441173] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] log_rotation_type = none {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.441310] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.441452] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] 
logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.441626] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.441800] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.441930] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.442092] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] long_rpc_timeout = 1800 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.442251] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] max_concurrent_builds = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.442465] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] max_concurrent_live_migrations = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.442589] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] max_concurrent_snapshots = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.442760] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] max_local_block_devices = 3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.442920] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] max_logfile_count = 30 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.443078] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] max_logfile_size_mb = 200 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.443237] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] maximum_instance_delete_attempts = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.443405] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] metadata_listen = 0.0.0.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.443571] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] metadata_listen_port = 8775 {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.443742] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] metadata_workers = 2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.443903] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] migrate_max_retries = -1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.444098] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] mkisofs_cmd = genisoimage {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.444313] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.444447] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] my_ip = 10.180.1.21 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.444608] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] network_allocate_retries = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.444789] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.444956] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.445119] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] osapi_compute_listen_port = 8774 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.445296] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] osapi_compute_unique_server_name_scope = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.445451] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] osapi_compute_workers = 2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.445612] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] password_length = 12 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.445773] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] periodic_enable = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.445932] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] periodic_fuzzy_delay = 60 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.446098] env[61273]: DEBUG 
oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] pointer_model = usbtablet {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.446264] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] preallocate_images = none {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.446424] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] publish_errors = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.446554] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] pybasedir = /opt/stack/nova {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.446709] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ram_allocation_ratio = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.446869] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] rate_limit_burst = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.447036] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] rate_limit_except_level = CRITICAL {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.447196] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] rate_limit_interval = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.447355] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] reboot_timeout = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.447511] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] reclaim_instance_interval = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.447667] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] record = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.447834] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] reimage_timeout_per_gb = 60 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.447999] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] report_interval = 120 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.448175] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] rescue_timeout = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.448334] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] reserved_host_cpus = 0 {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.448491] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] reserved_host_disk_mb = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.448650] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] reserved_host_memory_mb = 512 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.448808] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] reserved_huge_pages = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.448967] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] resize_confirm_window = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.449124] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] resize_fs_using_block_device = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.449281] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] resume_guests_state_on_host_boot = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.449449] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.449609] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] rpc_response_timeout = 60 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.449777] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] run_external_periodic_tasks = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.449995] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] running_deleted_instance_action = reap {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.450168] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.450329] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] running_deleted_instance_timeout = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.450487] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler_instance_sync_interval = 120 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.450659] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_down_time = 720 {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.450852] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] servicegroup_driver = db {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.451029] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] shelved_offload_time = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.451221] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] shelved_poll_interval = 3600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.451364] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] shutdown_timeout = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.451550] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] source_is_ipv6 = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.451724] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ssl_only = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.451970] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.452157] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] sync_power_state_interval = 600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.452324] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] sync_power_state_pool_size = 1000 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.452495] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] syslog_log_facility = LOG_USER {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.452678] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] tempdir = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.452854] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] timeout_nbd = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.453025] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] transport_url = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.453188] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] update_resources_interval = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.453349] env[61273]: DEBUG oslo_service.service [None 
req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] use_cow_images = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.453509] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] use_eventlog = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.453671] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] use_journal = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.453833] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] use_json = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.453990] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] use_rootwrap_daemon = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.454151] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] use_stderr = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.454309] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] use_syslog = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.454463] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vcpu_pin_set = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.454628] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plugging_is_fatal = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.454800] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plugging_timeout = 300 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.454967] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] virt_mkfs = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.455128] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] volume_usage_poll_interval = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.455287] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] watch_log_file = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.455497] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] web = /usr/share/spice-html5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 468.455633] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_concurrency.disable_process_locking = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
468.456195] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.456392] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.456567] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.456744] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.456921] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.457089] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.457273] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.auth_strategy = keystone {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.457443] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.compute_link_prefix = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.457621] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.457800] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.dhcp_domain = novalocal {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.457969] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.enable_instance_password = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.458135] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.glance_link_prefix = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.458303] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.458476] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None 
None] api.instance_list_cells_batch_strategy = distributed {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.458640] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.instance_list_per_project_cells = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.458804] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.list_records_by_skipping_down_cells = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.458966] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.local_metadata_per_cell = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.459136] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.max_limit = 1000 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.459305] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.metadata_cache_expiration = 15 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.459479] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.neutron_default_tenant_id = default {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.459651] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.response_validation = warn {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.459821] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.use_neutron_default_nets = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.459991] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.460171] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.460340] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.460519] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.460692] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.vendordata_dynamic_targets = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.460857] env[61273]: DEBUG oslo_service.service [None 
req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.vendordata_jsonfile_path = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.461037] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.461234] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.backend = dogpile.cache.memcached {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.461404] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.backend_argument = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.461605] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.config_prefix = cache.oslo {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.461793] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.dead_timeout = 60.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.461961] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.debug_cache_backend = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.462124] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.enable_retry_client = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.462287] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.enable_socket_keepalive = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.462458] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.enabled = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.462689] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.enforce_fips_mode = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.462866] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.expiration_time = 600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.463038] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.hashclient_retry_attempts = 2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.463209] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.463375] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] 
cache.memcache_dead_retry = 300 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.463537] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.memcache_password = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.463704] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.463870] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.464044] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.memcache_pool_maxsize = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.464220] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.464384] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.memcache_sasl_enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.464564] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.464735] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.464897] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.memcache_username = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.465063] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.proxies = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.465227] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.redis_db = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.465390] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.redis_password = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.465628] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.465827] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.redis_sentinels = 
['localhost:26379'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.466004] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.redis_server = localhost:6379 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.466173] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.redis_socket_timeout = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.466337] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.redis_username = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.466505] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.retry_attempts = 2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.466674] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.retry_delay = 0.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.466840] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.socket_keepalive_count = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.467003] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.socket_keepalive_idle = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.467165] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.socket_keepalive_interval = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.467323] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.tls_allowed_ciphers = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.467483] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.tls_cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.467642] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.tls_certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.467805] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.tls_enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.467963] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cache.tls_keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.468155] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.auth_section = None {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.468330] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.auth_type = password {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.468494] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.468671] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.468830] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.468992] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.469153] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.cross_az_attach = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.469315] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.debug = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.469475] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.endpoint_template = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.469639] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.http_retries = 3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.469804] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.469963] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.470133] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.os_region_name = RegionOne {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.470294] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.470453] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cinder.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.470646] env[61273]: DEBUG oslo_service.service [None 
req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.470828] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.cpu_dedicated_set = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.470991] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.cpu_shared_set = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.471156] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.image_type_exclude_list = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.471319] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.471484] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.471685] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.471864] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.472051] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.472222] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.resource_provider_association_refresh = 300 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.472384] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.472545] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.shutdown_retry_interval = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.472797] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.472940] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] conductor.workers = 2 {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.473118] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] console.allowed_origins = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.473281] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] console.ssl_ciphers = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.473450] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] console.ssl_minimum_version = default {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.473620] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] consoleauth.enforce_session_timeout = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.473792] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] consoleauth.token_ttl = 600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.473958] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.474117] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.474280] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.474439] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.connect_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.474596] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.connect_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.474754] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.endpoint_override = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.474915] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.475075] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.475233] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.max_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.475392] env[61273]: DEBUG 
oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.min_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.475575] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.region_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.475705] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.retriable_status_codes = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.475855] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.service_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.476034] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.service_type = accelerator {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.476201] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.476358] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.status_code_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.476515] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.status_code_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.476674] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.476851] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.477025] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] cyborg.version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.477206] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.backend = sqlalchemy {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.477376] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.connection = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.477557] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.connection_debug = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.477709] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] 
database.connection_parameters = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.477874] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.connection_recycle_time = 3600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.478037] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.connection_trace = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.478199] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.db_inc_retry_interval = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.478361] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.db_max_retries = 20 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.478522] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.db_max_retry_interval = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.478685] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.db_retry_interval = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.478847] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.max_overflow = 50 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.479007] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.max_pool_size = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.479167] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.max_retries = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.479334] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.479491] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.mysql_wsrep_sync_wait = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.479648] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.pool_timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.479808] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.retry_interval = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.479968] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.slave_connection = **** {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.480140] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.sqlite_synchronous = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.480301] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] database.use_db_reconnect = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.480479] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.backend = sqlalchemy {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.480648] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.connection = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.480815] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.connection_debug = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.480988] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.connection_parameters = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.481151] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.connection_recycle_time = 3600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.481314] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.connection_trace = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.481475] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.db_inc_retry_interval = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.481662] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.db_max_retries = 20 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.481830] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.db_max_retry_interval = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.481992] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.db_retry_interval = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.482156] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.max_overflow = 50 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.482320] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.max_pool_size = 5 {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.482481] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.max_retries = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.482667] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.482842] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.483004] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.pool_timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.483168] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.retry_interval = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.483327] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.slave_connection = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.483490] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] api_database.sqlite_synchronous = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.483668] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] devices.enabled_mdev_types = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.483847] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.484023] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.484192] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ephemeral_storage_encryption.enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.484356] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.484524] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.api_servers = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.484690] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.cafile = None {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.484851] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.485012] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.485171] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.connect_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.485329] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.connect_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.485491] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.debug = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.485672] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.default_trusted_certificate_ids = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.485811] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.enable_certificate_validation = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.485973] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.enable_rbd_download = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.486130] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.endpoint_override = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.486294] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.486458] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.486618] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.max_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.486777] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.min_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.486938] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.num_retries = 3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.487107] env[61273]: DEBUG 
oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.rbd_ceph_conf = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.487269] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.rbd_connect_timeout = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.487437] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.rbd_pool = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.487652] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.rbd_user = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.487769] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.region_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.487927] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.retriable_status_codes = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.488100] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.service_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.488274] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.service_type = image {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.488437] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.488595] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.status_code_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.488756] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.status_code_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.488965] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.489176] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.489354] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.verify_glance_signatures = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.489516] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] glance.version = None 
{{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.489687] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] guestfs.debug = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.489855] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] mks.enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.490202] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.490394] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] image_cache.manager_interval = 2400 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.490564] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] image_cache.precache_concurrency = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.490747] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] image_cache.remove_unused_base_images = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.490919] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.491085] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.491260] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] image_cache.subdirectory_name = _base {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.491436] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.api_max_retries = 60 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.491639] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.api_retry_interval = 2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.491806] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.auth_section = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.491971] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.auth_type = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.492147] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.cafile = None {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.492311] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.492475] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.492655] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.conductor_group = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.492832] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.connect_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.493006] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.connect_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.493152] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.endpoint_override = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.493313] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.493472] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.493630] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.max_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.493790] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.min_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.493955] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.peer_list = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.494120] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.region_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.494271] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.retriable_status_codes = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.494434] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.serial_console_state_timeout = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.494592] env[61273]: DEBUG 
oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.service_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.494761] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.service_type = baremetal {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.494920] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.shard = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.495082] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.495241] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.status_code_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.495401] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.status_code_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.495560] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.495768] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.495917] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ironic.version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.496090] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.496270] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] key_manager.fixed_key = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.496452] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.496615] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.barbican_api_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.496775] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.barbican_endpoint = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.496944] env[61273]: DEBUG oslo_service.service [None 
req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.barbican_endpoint_type = public {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.497132] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.barbican_region_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.497261] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.497420] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.497583] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.497747] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.497900] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.498060] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.number_of_retries = 60 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.498221] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.retry_delay = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.498381] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.send_service_user_token = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.498542] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.498700] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.498858] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.verify_ssl = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.499014] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican.verify_ssl_path = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.499180] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican_service_user.auth_section = None {{(pid=61273) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.499341] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican_service_user.auth_type = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.499499] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican_service_user.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.499657] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican_service_user.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.499817] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican_service_user.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.499976] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican_service_user.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.500149] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican_service_user.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.500311] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican_service_user.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.500468] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] barbican_service_user.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.500632] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.approle_role_id = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.500790] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.approle_secret_id = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.500961] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.kv_mountpoint = secret {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.501121] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.kv_path = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.501285] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.kv_version = 2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.501444] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.namespace = None {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.501623] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.root_token_id = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.501793] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.ssl_ca_crt_file = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.501962] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.timeout = 60.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.502126] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.use_ssl = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.502295] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.502467] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.auth_section = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.502647] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.auth_type = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.502821] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.502983] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.503145] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.503302] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.connect_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.503460] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.connect_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.503618] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.endpoint_override = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.503780] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.503940] env[61273]: DEBUG oslo_service.service 
[None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.504111] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.max_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.504271] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.min_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.504429] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.region_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.504586] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.retriable_status_codes = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.504745] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.service_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.504914] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.service_type = identity {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.505072] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.505229] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.status_code_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.505385] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.status_code_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.505541] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.505721] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.505883] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] keystone.version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.506086] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.connection_uri = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.506249] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.cpu_mode = None 
{{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.506416] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.506583] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.cpu_models = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.506757] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.cpu_power_governor_high = performance {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.506924] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.507087] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.cpu_power_management = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.507257] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.507420] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.device_detach_attempts = 8 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.507582] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.device_detach_timeout = 20 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.507746] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.disk_cachemodes = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.507905] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.disk_prefix = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.508085] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.enabled_perf_events = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.508254] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.file_backed_memory = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.508419] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.gid_maps = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.508578] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.hw_disk_discard = None {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.508739] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.hw_machine_type = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.508912] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.images_rbd_ceph_conf = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.509079] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.509240] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.509406] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.images_rbd_glance_store_name = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.509575] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.images_rbd_pool = rbd {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.509747] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.images_type = default {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.509906] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.images_volume_group = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.510067] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.inject_key = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.510229] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.inject_partition = -2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.510390] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.inject_password = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.510551] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.iscsi_iface = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.510715] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.iser_use_multipath = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.510876] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.511039] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.511198] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_downtime = 500 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.511358] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.511546] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.511711] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_inbound_addr = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.511874] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.512050] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.512208] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_scheme = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.512382] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_timeout_action = abort {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.512545] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_tunnelled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.512730] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_uri = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.512897] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.live_migration_with_native_tls = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.513059] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.max_queues = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.513220] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] 
libvirt.mem_stats_period_seconds = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.513440] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.513604] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.nfs_mount_options = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.513886] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.514057] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.514220] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.514381] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.514543] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.514706] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.num_pcie_ports = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.514871] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.515035] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.pmem_namespaces = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.515196] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.quobyte_client_cfg = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.515475] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.515650] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.515817] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 
None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.515984] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.516161] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.rbd_secret_uuid = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.516323] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.rbd_user = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.516487] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.516661] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.516821] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.rescue_image_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.516978] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.rescue_kernel_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.517137] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.rescue_ramdisk_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.517306] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.517466] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.rx_queue_size = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.517633] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.smbfs_mount_options = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.517930] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.518071] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.snapshot_compression = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.518231] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] 
libvirt.snapshot_image_format = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.518447] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.518615] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.sparse_logical_volumes = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.518781] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.swtpm_enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.518950] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.swtpm_group = tss {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.519116] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.swtpm_user = tss {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.519286] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.sysinfo_serial = unique {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.519445] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.tb_cache_size = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.519604] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.tx_queue_size = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.519771] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.uid_maps = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.519933] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.use_virtio_for_bridges = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.520119] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.virt_type = kvm {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.520321] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.volume_clear = zero {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.520489] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.volume_clear_size = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.520655] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.volume_use_multipath = False {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.520814] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.vzstorage_cache_path = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.520985] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.521154] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.521316] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.521492] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.521777] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.521959] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.vzstorage_mount_user = stack {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.522125] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.522302] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.auth_section = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.522477] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.auth_type = password {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.522659] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.522833] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.522997] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.523154] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.connect_retries = None {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.523336] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.connect_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.523478] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.default_floating_pool = public {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.523638] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.endpoint_override = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.523802] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.extension_sync_interval = 600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.523965] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.http_retries = 3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.524142] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.524305] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.524465] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.max_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.524636] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.524797] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.min_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.524966] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.ovs_bridge = br-int {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.525131] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.physnets = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.525301] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.region_name = RegionOne {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.525461] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.retriable_status_codes = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.525628] 
env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.service_metadata_proxy = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.525788] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.service_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.525953] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.service_type = network {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.526114] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.526272] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.status_code_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.526428] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.status_code_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.526585] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.526766] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.526926] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] neutron.version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.527099] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] notifications.bdms_in_notifications = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.527276] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] notifications.default_level = INFO {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.527452] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] notifications.notification_format = unversioned {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.527617] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] notifications.notify_on_state_change = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.527793] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
468.528035] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] pci.alias = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.528157] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] pci.device_spec = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.528324] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] pci.report_in_placement = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.528500] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.auth_section = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.528678] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.auth_type = password {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.528847] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.529009] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.529170] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.529332] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.529492] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.connect_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.529652] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.connect_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.529810] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.default_domain_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.529970] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.default_domain_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.530126] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.domain_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.530285] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 
None None] placement.domain_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.530444] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.endpoint_override = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.530605] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.530767] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.530926] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.max_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.531137] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.min_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.531338] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.password = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.531523] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.project_domain_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.531710] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.project_domain_name = Default {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.531881] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.project_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.532071] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.project_name = service {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.532248] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.region_name = RegionOne {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.532413] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.retriable_status_codes = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.532577] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.service_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.532773] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.service_type = placement {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.532945] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.533107] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.status_code_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.533269] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.status_code_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.533450] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.system_scope = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.533585] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.533745] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.trust_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.533905] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.user_domain_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.534073] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.user_domain_name = Default {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.534231] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.user_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.534404] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.username = nova {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.534585] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.534749] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] placement.version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.534925] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.cores = 20 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.535090] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.count_usage_from_placement = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
468.535263] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.535431] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.injected_file_content_bytes = 10240 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.535598] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.injected_file_path_length = 255 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.535763] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.injected_files = 5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.535997] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.instances = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.536326] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.key_pairs = 100 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.536445] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.metadata_items = 128 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.536655] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.ram = 51200 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.536836] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.recheck_quota = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.537007] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.server_group_members = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.537172] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] quota.server_groups = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.537344] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.537508] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.537675] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.image_metadata_prefilter = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.537837] env[61273]: DEBUG oslo_service.service [None 
req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.538005] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.max_attempts = 3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.538164] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.max_placement_results = 1000 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.538327] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.538490] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.538654] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.538826] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] scheduler.workers = 2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.538999] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.539170] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.539347] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.539518] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.539685] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.539844] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.540019] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.540216] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.540384] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.host_subset_size = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.540549] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.540711] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.540877] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.541046] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.isolated_hosts = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.541210] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.isolated_images = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.541371] env[61273]: DEBUG oslo_service.service [None 
req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.541566] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.541745] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.541911] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.pci_in_placement = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.542079] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.542240] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.542433] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.542652] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.542843] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.543013] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.543178] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.track_instance_changes = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.543359] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.543557] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] metrics.required = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.543697] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] metrics.weight_multiplier = 1.0 
{{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.543864] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.544051] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] metrics.weight_setting = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.544362] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.544541] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] serial_console.enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.544720] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] serial_console.port_range = 10000:20000 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.544891] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.545060] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.545229] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] serial_console.serialproxy_port = 6083 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.545399] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.auth_section = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.545573] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.auth_type = password {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.545738] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.545896] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.546076] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.546249] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.insecure = False {{(pid=61273) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.546368] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.546538] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.send_service_user_token = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.546702] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.546860] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] service_user.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.547030] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.agent_enabled = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.547193] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.547497] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.547712] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.547879] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.html5proxy_port = 6082 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.548055] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.image_compression = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.548224] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.jpeg_compression = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.548379] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.playback_compression = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.548548] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.server_listen = 127.0.0.1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.548719] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.548881] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.streaming_mode = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.549039] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] spice.zlib_compression = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.549205] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] upgrade_levels.baseapi = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.549376] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] upgrade_levels.compute = auto {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.549558] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] upgrade_levels.conductor = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.549780] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] upgrade_levels.scheduler = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.549992] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.550441] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.550441] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vendordata_dynamic_auth.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.550441] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vendordata_dynamic_auth.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.550779] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.550834] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vendordata_dynamic_auth.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.550950] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.551115] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.551274] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vendordata_dynamic_auth.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.551449] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.api_retry_count = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.551644] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.ca_file = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.551830] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.552037] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.cluster_name = testcl1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.552188] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.connection_pool_size = 10 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.552349] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.console_delay_seconds = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.552521] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.datastore_regex = ^datastore.* {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.552790] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.552990] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.host_password = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.553164] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.host_port = 443 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.553337] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.host_username = administrator@vsphere.local {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.553507] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.insecure = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.553824] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.integration_bridge = None {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.553824] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.maximum_objects = 100 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.553982] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.pbm_default_policy = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.554142] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.pbm_enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.554301] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.pbm_wsdl_location = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.554468] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.554627] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.serial_port_proxy_uri = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.554787] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.serial_port_service_uri = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.554953] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.task_poll_interval = 0.5 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.555124] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.use_linked_clone = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.555292] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.vnc_keymap = en-us {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.555456] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.vnc_port = 5900 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.555619] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vmware.vnc_port_total = 10000 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.555806] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.auth_schemes = ['none'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.555981] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.556347] env[61273]: 
DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.556536] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.556660] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.novncproxy_port = 6080 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.556836] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.server_listen = 127.0.0.1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.557009] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.557173] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.vencrypt_ca_certs = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.557333] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.vencrypt_client_cert = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.557491] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vnc.vencrypt_client_key = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.557669] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.557833] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.disable_deep_image_inspection = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.557995] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.558157] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.558321] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.558475] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.disable_rootwrap = False {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.558635] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.enable_numa_live_migration = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.558797] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.558956] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.559114] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.559273] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.libvirt_disable_apic = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.559432] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.559617] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.559788] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.559953] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.560130] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.560294] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.560456] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.560615] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
468.560775] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.560941] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.561126] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.561298] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.client_socket_timeout = 900 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.561465] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.default_pool_size = 1000 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.561661] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.keep_alive = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.561835] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.max_header_line = 16384 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.562000] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.562162] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.ssl_ca_file = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.562322] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.ssl_cert_file = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.562485] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.ssl_key_file = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.562665] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.tcp_keepidle = 600 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.562859] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.563028] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] zvm.ca_file = None {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.563191] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] zvm.cloud_connector_url = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.563478] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.563654] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] zvm.reachable_timeout = 300 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.563844] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.enforce_new_defaults = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.564049] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.enforce_scope = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.564200] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.policy_default_rule = default {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.564394] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.564570] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.policy_file = policy.yaml {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.564746] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.564907] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.565067] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.565224] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.565385] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.565551] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.565727] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.565903] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.connection_string = messaging:// {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.566070] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.enabled = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.566240] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.es_doc_type = notification {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.566445] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.es_scroll_size = 10000 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.566569] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.es_scroll_time = 2m {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.566732] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.filter_error_trace = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.566897] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.hmac_keys = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.567065] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.sentinel_service_name = mymaster {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.567229] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.socket_timeout = 0.1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.567391] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.trace_requests = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.567552] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler.trace_sqlalchemy = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.567728] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler_jaeger.process_tags = {} {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.567892] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.568068] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] profiler_otlp.service_name_prefix = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.568240] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] remote_debug.host = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.568417] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] remote_debug.port = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.568579] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.568745] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.568909] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.569072] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.569234] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.569397] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.569590] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.569776] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.569942] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.570112] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.570273] env[61273]: 
DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.570443] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.570610] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.570783] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.570954] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.571121] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.571284] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.571456] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.571649] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.571830] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.572015] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.572196] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.572360] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.572528] env[61273]: DEBUG oslo_service.service [None 
req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.572710] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.572879] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.573043] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.573205] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.573371] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.573537] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.ssl = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.573713] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.573887] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.574042] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.574211] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.574381] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.574543] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.574734] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.574900] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_notifications.retry = -1 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.575081] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.575255] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.575430] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.auth_section = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.575595] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.auth_type = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.575757] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.cafile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.575915] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.certfile = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.576088] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.collect_timing = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.576250] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.connect_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.576408] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.connect_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.576566] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.endpoint_id = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.576766] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.endpoint_override = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.576885] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.insecure = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.577040] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.keyfile = None {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.577193] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.max_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.577348] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.min_version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.577503] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.region_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.577662] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.retriable_status_codes = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.577930] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.service_name = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.578006] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.service_type = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.578127] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.split_loggers = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.578281] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.status_code_retries = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.578436] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.status_code_retry_delay = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.578593] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.timeout = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.578751] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.valid_interfaces = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.578907] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_limit.version = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.579069] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_reports.file_event_handler = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.579233] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61273) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.579390] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] oslo_reports.log_dir = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.579581] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.579757] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.579919] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.580099] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.580266] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.580428] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.580728] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.580960] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_ovs_privileged.group = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.581136] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.581353] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.581608] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.581810] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] vif_plug_ovs_privileged.user = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.581991] env[61273]: DEBUG oslo_service.service 
[None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.582175] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.582352] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.582527] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.582703] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.582874] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.583041] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.583205] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.583386] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.583559] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_ovs.isolate_vif = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.583729] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.583899] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.584082] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.584261] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 468.584424] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_vif_ovs.per_port_bridge = False {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.584602] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] os_brick.lock_path = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.584840] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] privsep_osbrick.capabilities = [21] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.585018] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] privsep_osbrick.group = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.585182] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] privsep_osbrick.helper_command = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.585351] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.585518] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.585682] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] privsep_osbrick.user = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.585859] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.586020] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] nova_sys_admin.group = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.586179] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] nova_sys_admin.helper_command = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.586345] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.586509] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.586669] env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] nova_sys_admin.user = None {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 468.586800] 
env[61273]: DEBUG oslo_service.service [None req-867debec-b00d-4c6f-909e-aecc3285dcd4 None None] ******************************************************************************** {{(pid=61273) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 468.587225] env[61273]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 468.588095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 897ac020bcbe4a338e9470c4c5f9b9d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 468.595701] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 897ac020bcbe4a338e9470c4c5f9b9d1 [ 469.090251] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Getting list of instances from cluster (obj){ [ 469.090251] env[61273]: value = "domain-c8" [ 469.090251] env[61273]: _type = "ClusterComputeResource" [ 469.090251] env[61273]: } {{(pid=61273) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 469.091328] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec517e2-2a7a-4ae4-888a-a9574c17e251 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 469.101107] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Got total of 0 instances {{(pid=61273) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 469.101728] env[61273]: WARNING nova.virt.vmwareapi.driver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 469.102188] env[61273]: INFO nova.virt.node [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Generated node identity 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb [ 469.102424] env[61273]: INFO nova.virt.node [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Wrote node identity 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb to /opt/stack/data/n-cpu-1/compute_id [ 469.102824] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 4d444daa12e54be4a799f5993489efd8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 469.115485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d444daa12e54be4a799f5993489efd8 [ 469.605078] env[61273]: WARNING nova.compute.manager [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Compute nodes ['4b2a9d85-76d2-47a9-873e-680d9c1d5ccb'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. 
[ 469.605806] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg c638e2a87e584f7dba0566776d9cffb8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 469.631302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c638e2a87e584f7dba0566776d9cffb8 [ 470.108723] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 0e1067187cf24f1197a3936106441f55 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 470.121139] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e1067187cf24f1197a3936106441f55 [ 470.611597] env[61273]: INFO nova.compute.manager [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 470.612061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 150303e46628432d9fed1a398d56eb5e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 470.622534] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 150303e46628432d9fed1a398d56eb5e [ 471.114998] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 5fd3790411ec47018a6744b167de58e8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 471.127472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fd3790411ec47018a6744b167de58e8 [ 471.617748] env[61273]: WARNING nova.compute.manager [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 471.618042] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 471.618270] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 471.618419] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 471.618572] env[61273]: DEBUG nova.compute.resource_tracker [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61273) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 471.619462] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397d4c22-3be3-4d84-9487-c8f8ba9a8d50 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 471.627816] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07498bd-8a9a-4f47-bde4-f5c416b0d8d2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 471.641650] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d510e6da-069f-4652-82fa-cb936503ab36 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 471.647860] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d4cd9d-c893-4c96-80d4-aef5029d0b72 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 471.677145] env[61273]: DEBUG nova.compute.resource_tracker [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181748MB free_disk=141GB free_vcpus=48 pci_devices=None {{(pid=61273) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 471.677307] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 471.677498] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 471.677857] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 99791bbe4eea4d76bad0e8a041c285b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 471.693584] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99791bbe4eea4d76bad0e8a041c285b4 [ 472.180702] env[61273]: WARNING nova.compute.resource_tracker [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] No compute node record for cpu-1:4b2a9d85-76d2-47a9-873e-680d9c1d5ccb: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb could not be found. [ 472.182001] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 7b447538dd8d446384ea2bc301f7a346 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 472.194656] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b447538dd8d446384ea2bc301f7a346 [ 472.684955] env[61273]: INFO nova.compute.resource_tracker [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb [ 472.685359] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 1582b572718845c6819f7ae397ae2d0b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 472.698040] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1582b572718845c6819f7ae397ae2d0b [ 473.188604] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg d7579819a38a4e9980ec4432230a516d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 473.208470] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7579819a38a4e9980ec4432230a516d [ 473.690913] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 5745387fc793451e83cc4ae57b25fb4e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 473.712773] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5745387fc793451e83cc4ae57b25fb4e [ 474.193940] env[61273]: DEBUG nova.compute.resource_tracker [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 474.194364] env[61273]: DEBUG nova.compute.resource_tracker [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 474.336814] env[61273]: INFO nova.scheduler.client.report [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] [req-9ad51316-fe25-45aa-aaf3-8e4f899b39e1] Created resource provider record via placement API for resource provider with UUID 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 474.353062] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee65284c-55de-4b68-addd-e0f41d112c28 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 474.360612] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce4fe15-3441-4a3c-a7b6-cdbdc88736d3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 474.389569] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c5aad0-56ee-4a4a-96f1-3fede80a7cde {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 474.396378] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454ae567-b2fe-440c-8d70-1339f5c77034 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 474.409782] env[61273]: DEBUG nova.compute.provider_tree [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Updating inventory in ProviderTree for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 474.410369] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 3020e3d9604d455c9faff8c79f40043b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 474.417657] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3020e3d9604d455c9faff8c79f40043b [ 474.944726] env[61273]: DEBUG nova.scheduler.client.report [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Updated inventory for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 474.944968] env[61273]: DEBUG nova.compute.provider_tree [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Updating resource provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb generation from 0 to 1 during operation: update_inventory {{(pid=61273) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 474.945109] env[61273]: DEBUG nova.compute.provider_tree [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Updating inventory in ProviderTree for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 474.996376] env[61273]: DEBUG nova.compute.provider_tree [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Updating resource provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb generation from 1 to 2 during operation: update_traits {{(pid=61273) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 474.998595] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Expecting reply to msg 3c4291f855f14c5f9db7ecddcef26255 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 475.021449] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c4291f855f14c5f9db7ecddcef26255 [ 475.501060] env[61273]: DEBUG nova.compute.resource_tracker [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61273) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 475.501345] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.824s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 475.501416] env[61273]: DEBUG nova.service [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Creating RPC server for service compute {{(pid=61273) start /opt/stack/nova/nova/service.py:186}} [ 475.511711] env[61273]: INFO oslo.messaging._drivers.impl_rabbit [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Creating fanout queue: compute_fanout_a16620dfd9fc4e4fa3bff905e38d1cf0 [ 475.515837] env[61273]: DEBUG nova.service [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] Join ServiceGroup membership for this service compute {{(pid=61273) start /opt/stack/nova/nova/service.py:203}} [ 475.516016] env[61273]: DEBUG nova.servicegroup.drivers.db [None req-5170bda3-8684-4996-8d16-1c94376f3080 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61273) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 480.518120] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a75619de94754cf0bfcf4f2db84d8669 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 480.553021] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a75619de94754cf0bfcf4f2db84d8669 [ 516.140896] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "52eb3ce9-696c-445c-b82d-90663a9e8b21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.141559] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "52eb3ce9-696c-445c-b82d-90663a9e8b21" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.142317] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg d5c10dfd244c4be2b58bdf3b598a8835 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 516.172878] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5c10dfd244c4be2b58bdf3b598a8835 [ 516.645691] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 516.648019] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg e7d8a3fdb9ec45ff99f3b0428d7aa889 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 516.783391] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7d8a3fdb9ec45ff99f3b0428d7aa889 [ 517.035180] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "986f91ac-e44f-474a-885a-bc097b396019" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.035395] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "986f91ac-e44f-474a-885a-bc097b396019" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.036291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 6502b870f764433d9ad71b6a1ebe05d8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 517.058112] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6502b870f764433d9ad71b6a1ebe05d8 [ 517.129565] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Acquiring lock "020a5b3a-bda7-4a8a-9dad-948cee5a7373" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.129691] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 
tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Lock "020a5b3a-bda7-4a8a-9dad-948cee5a7373" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.130285] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg c602da6464aa44c09bf131f04113ba84 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 517.165323] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c602da6464aa44c09bf131f04113ba84 [ 517.221147] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.221486] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.223081] env[61273]: INFO nova.compute.claims [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 517.225684] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 18ce9692958d412b8e1b62bb476f2e9d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 517.328133] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18ce9692958d412b8e1b62bb476f2e9d [ 517.363806] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.364530] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.365018] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply 
to msg d39ce42b02464c27af2ff81d24af6efc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 517.375427] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d39ce42b02464c27af2ff81d24af6efc [ 517.537993] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 517.539761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg aa35cb9dcff445f7a31b9fb947bf9247 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 517.577659] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa35cb9dcff445f7a31b9fb947bf9247 [ 517.632358] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 517.635484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 3c16593ad4e1475eaf3c1471b70f7ca5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 517.689837] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c16593ad4e1475eaf3c1471b70f7ca5 [ 517.728815] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 4c9bbc658dc1449fab056cc97e47e0e5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 517.745572] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c9bbc658dc1449fab056cc97e47e0e5 [ 517.868716] env[61273]: DEBUG nova.compute.manager [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 517.870399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg ff941aeac3fe4e41ac65e74231806c2e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 517.915729] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff941aeac3fe4e41ac65e74231806c2e [ 518.062531] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.154572] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.351382] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca95b99-6fdd-4343-9ddc-dadbd49aaeeb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.360447] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68c4ba7-95d9-465c-b080-8f7b79846ce5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.398931] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265e0b27-e828-48ef-a0f2-daccbf0f5008 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.407611] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3773456-2aac-471f-99f2-1556b3eedf69 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.413442] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.421802] env[61273]: DEBUG nova.compute.provider_tree [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 518.422425] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg c37ebf0776ba4a5ab65064337e58031b in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 518.435154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c37ebf0776ba4a5ab65064337e58031b [ 518.442072] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Acquiring lock "8f2cba43-bdec-4455-b795-784b29e2ea5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.442302] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Lock "8f2cba43-bdec-4455-b795-784b29e2ea5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.442761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 7d7629ebbc26452bb01bd142384275db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 518.454130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d7629ebbc26452bb01bd142384275db [ 518.486796] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Acquiring lock "ea496eae-68f9-43f1-b4cf-6743043c753b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.487679] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Lock "ea496eae-68f9-43f1-b4cf-6743043c753b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.488212] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 4cb9763414e84f1bbb8ea5accc6b9e88 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 518.507230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cb9763414e84f1bbb8ea5accc6b9e88 [ 518.925098] env[61273]: DEBUG nova.scheduler.client.report [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 518.927466] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 1400e89cf38347cf90101fe8050275b3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 518.939327] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1400e89cf38347cf90101fe8050275b3 [ 518.944692] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 518.946075] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 0fee1a3d7944495980b0844130e8c584 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 518.990749] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 518.991967] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 9f2e288544d4410d9ed8dfb558b712e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 518.993257] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fee1a3d7944495980b0844130e8c584 [ 519.052172] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f2e288544d4410d9ed8dfb558b712e6 [ 519.431024] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.209s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 519.431654] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 519.433429] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 1f3a1ccc9e004795b0bbbea18bb811e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 519.434416] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.372s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.435787] env[61273]: INFO nova.compute.claims [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 519.437284] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg b353163cbad94d27b705b449cedcbf88 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 519.445208] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.445419] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.445878] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 52d480d127f540aebb93e74bc37dc5bf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 519.463279] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52d480d127f540aebb93e74bc37dc5bf [ 519.474779] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.510944] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f3a1ccc9e004795b0bbbea18bb811e3 [ 519.518589] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae 
tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.521022] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b353163cbad94d27b705b449cedcbf88 [ 519.940901] env[61273]: DEBUG nova.compute.utils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 519.941679] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg c8f2358d6eb84435a3912c33c74b68bf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 519.942867] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 519.943095] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 519.951588] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 519.951588] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 3a6802af268340e9a2b6e98e51669e62 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 519.956576] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg ff2bdad21c9e4854af3fa325b16d2168 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 519.965385] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff2bdad21c9e4854af3fa325b16d2168 [ 519.970449] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8f2358d6eb84435a3912c33c74b68bf [ 520.000715] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a6802af268340e9a2b6e98e51669e62 [ 520.145962] env[61273]: DEBUG nova.policy [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3078a2af81b248f8b100f58ee66a5a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c06b50a7aaa742afbbd0c6fc56c3d131', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 520.448333] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 520.450123] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg abbd472d948142609e8cb08673548d00 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 520.471440] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Successfully created port: d6dd597b-ff4a-4e48-b10a-96073004f3eb {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 520.476445] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.495082] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abbd472d948142609e8cb08673548d00 [ 520.581002] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2513d549-5b09-4be4-9503-69b5ea9b107b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.590417] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa89f2d-9ebc-4e9b-9e10-23b6e4eac323 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.628882] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614e4f54-5613-434c-884e-6d2578270fd5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.637274] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440826d5-9a51-424d-b912-8c8f08f42d40 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.651580] env[61273]: DEBUG nova.compute.provider_tree [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 520.652328] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 843113342913499b8d1cd1313a2de12f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 520.660678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 843113342913499b8d1cd1313a2de12f [ 520.954744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 443f59bf86004ea7a627b41f3c565165 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 521.000023] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 443f59bf86004ea7a627b41f3c565165 [ 521.159091] env[61273]: DEBUG nova.scheduler.client.report [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 521.161433] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 43ff67435f3146ba89b57ba4c9048029 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 521.187399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43ff67435f3146ba89b57ba4c9048029 [ 521.459384] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 521.490999] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 521.491237] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 521.491385] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 521.491615] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 
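The inventory payload reported for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb above (VCPU, MEMORY_MB and DISK_GB, each with total, reserved, min/max_unit, step_size and allocation_ratio) is what the scheduler report client keeps in sync with Placement. The usable capacity per resource class is (total - reserved) * allocation_ratio, with any single allocation capped by max_unit; a short sketch with the numbers from this log:

    # Derive the capacity Placement exposes from the inventory dict logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 141},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: capacity={capacity}, largest single allocation={inv['max_unit']}")

    # VCPU: capacity=192, largest single allocation=16
    # MEMORY_MB: capacity=196078, largest single allocation=65530
    # DISK_GB: capacity=400, largest single allocation=141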
tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 521.491783] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 521.491942] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 521.492160] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 521.492315] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 521.492806] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 521.492949] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 521.493118] env[61273]: DEBUG nova.virt.hardware [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 521.494694] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16796215-c96d-42b4-a7ae-2b50b79bd63a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.509501] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb12c44-cf93-42e9-8bcb-2b7ccc7f887e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.527567] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abe4c8b-776c-469c-b909-dcf1b8d79a6a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.664092] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e 
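The nova.virt.hardware messages above walk through CPU topology selection for the m1.nano flavor: neither flavor nor image sets limits or preferences (the 0:0:0 values fall back to maxima of 65536), so for 1 vCPU the only sockets*cores*threads factorization is 1:1:1, which is exactly the single "possible" and "sorted desired" topology logged. An illustrative enumeration of that narrowing (a sketch of the idea, not the hardware.py implementation):

    # Illustrative sketch: enumerate sockets/cores/threads factorizations of a
    # vCPU count, subject to upper limits, the way the log narrows 1 vCPU to 1:1:1.
    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if sockets * cores * threads != vcpus:
                continue
            if sockets > max_sockets or cores > max_cores or threads > max_threads:
                continue
            topologies.append((sockets, cores, threads))
        return topologies

    # m1.nano: 1 vCPU, no explicit limits -> only (1, 1, 1) survives.
    print(possible_topologies(1))   # [(1, 1, 1)]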
tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.664923] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 521.666524] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 2015bbe4872746deb08e396f8bbd5683 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 521.667562] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.513s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.670366] env[61273]: INFO nova.compute.claims [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 521.670605] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 43248a399fd64b5b98c7e9155eefbc2c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 521.723705] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2015bbe4872746deb08e396f8bbd5683 [ 521.728575] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43248a399fd64b5b98c7e9155eefbc2c [ 521.926292] env[61273]: ERROR nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d6dd597b-ff4a-4e48-b10a-96073004f3eb, please check neutron logs for more information. 
[ 521.926292] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 521.926292] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.926292] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 521.926292] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 521.926292] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 521.926292] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 521.926292] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 521.926292] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.926292] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 521.926292] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.926292] env[61273]: ERROR nova.compute.manager raise self.value [ 521.926292] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 521.926292] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 521.926292] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.926292] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 521.926912] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.926912] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 521.926912] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d6dd597b-ff4a-4e48-b10a-96073004f3eb, please check neutron logs for more information. 
[ 521.926912] env[61273]: ERROR nova.compute.manager [ 521.926912] env[61273]: Traceback (most recent call last): [ 521.926912] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 521.926912] env[61273]: listener.cb(fileno) [ 521.926912] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.926912] env[61273]: result = function(*args, **kwargs) [ 521.926912] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 521.926912] env[61273]: return func(*args, **kwargs) [ 521.926912] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 521.926912] env[61273]: raise e [ 521.926912] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.926912] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 521.926912] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 521.926912] env[61273]: created_port_ids = self._update_ports_for_instance( [ 521.926912] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 521.926912] env[61273]: with excutils.save_and_reraise_exception(): [ 521.926912] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.926912] env[61273]: self.force_reraise() [ 521.926912] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.926912] env[61273]: raise self.value [ 521.926912] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 521.926912] env[61273]: updated_port = self._update_port( [ 521.926912] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.926912] env[61273]: _ensure_no_port_binding_failure(port) [ 521.926912] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.926912] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 521.927611] env[61273]: nova.exception.PortBindingFailed: Binding failed for port d6dd597b-ff4a-4e48-b10a-96073004f3eb, please check neutron logs for more information. [ 521.927611] env[61273]: Removing descriptor: 15 [ 521.928029] env[61273]: ERROR nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d6dd597b-ff4a-4e48-b10a-96073004f3eb, please check neutron logs for more information. 
[ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Traceback (most recent call last): [ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] yield resources [ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self.driver.spawn(context, instance, image_meta, [ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] vm_ref = self.build_virtual_machine(instance, [ 521.928029] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] vif_infos = vmwarevif.get_vif_info(self._session, [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] for vif in network_info: [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] return self._sync_wrapper(fn, *args, **kwargs) [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self.wait() [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self[:] = self._gt.wait() [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] return self._exit_event.wait() [ 521.928320] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 521.928320] env[61273]: ERROR 
nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] result = hub.switch() [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] return self.greenlet.switch() [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] result = function(*args, **kwargs) [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] return func(*args, **kwargs) [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] raise e [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] nwinfo = self.network_api.allocate_for_instance( [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] created_port_ids = self._update_ports_for_instance( [ 521.928627] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] with excutils.save_and_reraise_exception(): [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self.force_reraise() [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] raise self.value [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] updated_port = self._update_port( [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.928936] 
env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] _ensure_no_port_binding_failure(port) [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] raise exception.PortBindingFailed(port_id=port['id']) [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] nova.exception.PortBindingFailed: Binding failed for port d6dd597b-ff4a-4e48-b10a-96073004f3eb, please check neutron logs for more information. [ 521.928936] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] [ 521.929262] env[61273]: INFO nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Terminating instance [ 521.933523] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-52eb3ce9-696c-445c-b82d-90663a9e8b21" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.933675] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-52eb3ce9-696c-445c-b82d-90663a9e8b21" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.933832] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 521.934226] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg c0b7e93b76ea47cfa49580317c6420bd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 521.939466] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "40709d2b-cc33-4ac0-9a13-731442d7edff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.939466] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "40709d2b-cc33-4ac0-9a13-731442d7edff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.939466] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
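The traceback ends in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed when Neutron hands the port back with a failed binding; that exception is what aborts _allocate_network_async and the spawn, leading to the "Terminating instance" and refresh_cache lock messages that follow. A simplified, self-contained paraphrase of that check (an illustration of why the exception fires, not the Nova module itself):

    # Simplified paraphrase of the check named in the traceback
    # (nova/network/neutron.py:_ensure_no_port_binding_failure); illustrative only.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding via the binding:vif_type attribute.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # The port created earlier in this log would look roughly like this after
    # ML2 failed to find a mechanism driver able to bind it:
    port = {'id': 'd6dd597b-ff4a-4e48-b10a-96073004f3eb',
            'binding:vif_type': 'binding_failed'}
    ensure_no_port_binding_failure(port)   # raises PortBindingFailed

In practice the next step is the one the message suggests: inspect the neutron server and agent logs for port d6dd597b-ff4a-4e48-b10a-96073004f3eb to see why it could not be bound.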
req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 144d2c3c92304961b071f01af8cf30a5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 521.943153] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0b7e93b76ea47cfa49580317c6420bd [ 521.945291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 144d2c3c92304961b071f01af8cf30a5 [ 522.173739] env[61273]: DEBUG nova.compute.utils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 522.174331] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg d9c1d73d1bae4d73a5633b1c74da5634 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 522.176383] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg f52f3d0303af4637afc6ea8afaf7ea01 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 522.177296] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 522.177526] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 522.187821] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9c1d73d1bae4d73a5633b1c74da5634 [ 522.188425] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f52f3d0303af4637afc6ea8afaf7ea01 [ 522.248185] env[61273]: DEBUG nova.policy [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c66923fec264dcf9cf04011a1463650', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b11f5c3d754416d81b53d294f3f9631', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 522.441295] env[61273]: DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Starting instance... 
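The "Using /dev/sd instead of None" DEBUG line above comes from nova.compute.utils.get_next_device_name, which falls back to the /dev/sd prefix when the request supplies no device name and then picks the next free letter. A hypothetical helper showing that naming pattern (not Nova's implementation, which also has to honour existing block device mappings and hypervisor-specific prefixes):

    # Illustrative helper (not Nova's get_next_device_name): pick the next free
    # /dev/sdX name given the device names already attached to the instance.
    import string

    def next_device_name(existing, prefix='/dev/sd'):
        used = {name[len(prefix):] for name in existing if name.startswith(prefix)}
        for letter in string.ascii_lowercase:
            if letter not in used:
                return prefix + letter
        raise ValueError('no free device names left')

    print(next_device_name([]))              # /dev/sda  (root disk for this build)
    print(next_device_name(['/dev/sda']))    # /dev/sdb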
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 522.443384] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg b2a5feed0b44474f828224eb52aaed7f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 522.477164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2a5feed0b44474f828224eb52aaed7f [ 522.478633] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 522.625722] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.626286] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 4720221006e747ad8cf55788c1a6c0a5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 522.641472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4720221006e747ad8cf55788c1a6c0a5 [ 522.677138] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Successfully created port: 397126f4-dc7b-428e-9725-ba5c0ded1af3 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 522.680956] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 522.682659] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg fb9984280bea42fa8f21c69b5ac7face in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 522.737443] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb9984280bea42fa8f21c69b5ac7face [ 522.820523] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75b4edf-d3b7-47b3-bf90-64667ad05d42 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.830277] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c88582f-975c-4b01-8e9b-1cddc00ff5e6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.878343] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa587aab-99b0-432e-9158-dd6c8a1681d2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.887212] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699ef5be-19db-49de-9bfa-9d0ec8418384 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.903601] env[61273]: DEBUG nova.compute.provider_tree [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 522.904136] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 61105fad7c9f4dda9a2a143ed52c26cb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 522.911840] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61105fad7c9f4dda9a2a143ed52c26cb [ 522.972274] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.128230] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-52eb3ce9-696c-445c-b82d-90663a9e8b21" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.128667] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 523.128861] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 523.129151] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58f61eae-127f-4fa5-982c-cc9e6cb738df {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.147374] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542e1413-dcb4-4bfd-99f0-9315d4ea0cb1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.164381] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 52eb3ce9-696c-445c-b82d-90663a9e8b21 could not be found. [ 523.165069] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 523.165176] env[61273]: INFO nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Took 0.04 seconds to destroy the instance on the hypervisor. [ 523.165478] env[61273]: DEBUG oslo.service.loopingcall [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 523.165741] env[61273]: DEBUG nova.compute.manager [-] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 523.165875] env[61273]: DEBUG nova.network.neutron [-] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 523.196029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg f68218dd9a3946139b392e3d89f37bf2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 523.206277] env[61273]: DEBUG nova.network.neutron [-] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Instance cache missing network info. 
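The oslo.service looping-call message above shows teardown wrapping _deallocate_network_with_retries, so a transient Neutron failure while deleting the instance's ports is retried rather than leaking the allocation. A plain-Python sketch of that retry-until-success shape (illustrative only; the real code uses oslo.service's looping call and Neutron client exceptions, not this hypothetical helper):

    # Hypothetical retry helper in the spirit of the looping call logged above;
    # not the oslo.service / Nova implementation.
    import time

    def call_with_retries(func, attempts=3, delay=1.0):
        last_exc = None
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except Exception as exc:      # in Nova this would be Neutron client errors
                last_exc = exc
                time.sleep(delay)
        raise last_exc

    def deallocate_network():
        # Placeholder for the Neutron call that unbinds/deletes the instance's ports.
        print("deallocate_for_instance() for 52eb3ce9-696c-445c-b82d-90663a9e8b21")

    call_with_retries(deallocate_network)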
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 523.206874] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 02d8e571998d4142869e7ed0996e092f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 523.220547] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02d8e571998d4142869e7ed0996e092f [ 523.244709] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f68218dd9a3946139b392e3d89f37bf2 [ 523.408274] env[61273]: DEBUG nova.scheduler.client.report [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 523.409386] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 2a85d0e36ae944d7855720d2f16bfc33 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 523.422672] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a85d0e36ae944d7855720d2f16bfc33 [ 523.597344] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Acquiring lock "1784917b-8a7e-4974-b8b3-f8f2b3db019a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.597344] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Lock "1784917b-8a7e-4974-b8b3-f8f2b3db019a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.597344] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg c9d1665c29c94a97beb3109dee82f818 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 523.610257] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9d1665c29c94a97beb3109dee82f818 [ 523.694952] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 523.709915] env[61273]: DEBUG nova.network.neutron [-] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.709915] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8de946a89ab044439c23f66472eea392 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 523.719340] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 523.719985] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 523.720446] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 523.720866] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 523.721163] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 523.721602] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 523.721951] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 523.722243] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 523.722767] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 523.723082] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 523.723392] env[61273]: DEBUG nova.virt.hardware [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 523.724676] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5502e2a4-1748-4147-ba30-83e3f842b265 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.727699] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8de946a89ab044439c23f66472eea392 [ 523.737628] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9567816b-ed00-4a66-991c-76e0ab0031ed {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.914025] env[61273]: ERROR nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 397126f4-dc7b-428e-9725-ba5c0ded1af3, please check neutron logs for more information. 
[ 523.914025] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 523.914025] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 523.914025] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 523.914025] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 523.914025] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 523.914025] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 523.914025] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 523.914025] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.914025] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 523.914025] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.914025] env[61273]: ERROR nova.compute.manager raise self.value [ 523.914025] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 523.914025] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 523.914025] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.914025] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 523.914506] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.914506] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 523.914506] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 397126f4-dc7b-428e-9725-ba5c0ded1af3, please check neutron logs for more information. 
[ 523.914506] env[61273]: ERROR nova.compute.manager [ 523.914506] env[61273]: Traceback (most recent call last): [ 523.914506] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 523.914506] env[61273]: listener.cb(fileno) [ 523.914506] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.914506] env[61273]: result = function(*args, **kwargs) [ 523.914506] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 523.914506] env[61273]: return func(*args, **kwargs) [ 523.914506] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 523.914506] env[61273]: raise e [ 523.914506] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 523.914506] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 523.914506] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 523.914506] env[61273]: created_port_ids = self._update_ports_for_instance( [ 523.914506] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 523.914506] env[61273]: with excutils.save_and_reraise_exception(): [ 523.914506] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.914506] env[61273]: self.force_reraise() [ 523.914506] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.914506] env[61273]: raise self.value [ 523.914506] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 523.914506] env[61273]: updated_port = self._update_port( [ 523.914506] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.914506] env[61273]: _ensure_no_port_binding_failure(port) [ 523.914506] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.914506] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 523.915320] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 397126f4-dc7b-428e-9725-ba5c0ded1af3, please check neutron logs for more information. [ 523.915320] env[61273]: Removing descriptor: 15 [ 523.915320] env[61273]: ERROR nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 397126f4-dc7b-428e-9725-ba5c0ded1af3, please check neutron logs for more information. 
[ 523.915320] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] Traceback (most recent call last): [ 523.915320] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 523.915320] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] yield resources [ 523.915320] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 523.915320] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self.driver.spawn(context, instance, image_meta, [ 523.915320] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 523.915320] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self._vmops.spawn(context, instance, image_meta, injected_files, [ 523.915320] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 523.915320] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] vm_ref = self.build_virtual_machine(instance, [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] vif_infos = vmwarevif.get_vif_info(self._session, [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] for vif in network_info: [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] return self._sync_wrapper(fn, *args, **kwargs) [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self.wait() [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self[:] = self._gt.wait() [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] return self._exit_event.wait() [ 523.915660] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 523.916056] env[61273]: ERROR 
nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] result = hub.switch() [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] return self.greenlet.switch() [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] result = function(*args, **kwargs) [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] return func(*args, **kwargs) [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] raise e [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] nwinfo = self.network_api.allocate_for_instance( [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 523.916056] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] created_port_ids = self._update_ports_for_instance( [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] with excutils.save_and_reraise_exception(): [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self.force_reraise() [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] raise self.value [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] updated_port = self._update_port( [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.916623] 
env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] _ensure_no_port_binding_failure(port) [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.916623] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] raise exception.PortBindingFailed(port_id=port['id']) [ 523.916951] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] nova.exception.PortBindingFailed: Binding failed for port 397126f4-dc7b-428e-9725-ba5c0ded1af3, please check neutron logs for more information. [ 523.916951] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] [ 523.916951] env[61273]: INFO nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Terminating instance [ 523.916951] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 523.916951] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 523.916951] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 1bc4fafd015340949bc03857d8ddb031 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 523.917188] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "refresh_cache-986f91ac-e44f-474a-885a-bc097b396019" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.917316] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquired lock "refresh_cache-986f91ac-e44f-474a-885a-bc097b396019" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.917477] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 523.917846] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 21fa018cafc54e36885e86df4d6d5a00 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 523.919265] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.505s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.920347] env[61273]: INFO nova.compute.claims [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 523.921948] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg d28540d3e907440d92d8434b3706449e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 523.936610] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21fa018cafc54e36885e86df4d6d5a00 [ 523.989336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d28540d3e907440d92d8434b3706449e [ 524.004307] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bc4fafd015340949bc03857d8ddb031 [ 524.097530] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 
1784917b-8a7e-4974-b8b3-f8f2b3db019a] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 524.099362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg cbca364f1d0642f6b424fe3d81f7642f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 524.141035] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbca364f1d0642f6b424fe3d81f7642f [ 524.212786] env[61273]: INFO nova.compute.manager [-] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Took 1.05 seconds to deallocate network for instance. [ 524.216547] env[61273]: DEBUG nova.compute.claims [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 524.216547] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.430087] env[61273]: DEBUG nova.compute.utils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 524.430087] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 3a35b39676294ae081e9464d07056aeb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 524.430087] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 524.430087] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 524.438402] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 20de6683576d446180e6ed3a7933e009 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 524.446319] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20de6683576d446180e6ed3a7933e009 [ 524.448407] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a35b39676294ae081e9464d07056aeb [ 524.462082] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 524.628310] env[61273]: DEBUG nova.policy [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8fa21bcd84747c0bca54261bdc441c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '088552f0960945fab5e28d10e79cb726', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 524.634094] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.677089] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.677089] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 79f49d301ad44af5885a7675370389a7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 524.687936] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79f49d301ad44af5885a7675370389a7 [ 524.931592] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d 
tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 524.934780] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 6cf633d24b7d49799ad62fbf7ff01daa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 524.995727] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cf633d24b7d49799ad62fbf7ff01daa [ 525.167777] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8aa7d1-4fba-451e-8dde-0822b22544a8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.175586] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5aaa9a-6e36-42b9-98ec-0bb8f36fc5ee {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.179628] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Releasing lock "refresh_cache-986f91ac-e44f-474a-885a-bc097b396019" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.180162] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 525.180391] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 525.180651] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0ae2fd1-a9ba-458b-b1d4-bafd50c9ca7f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.216661] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b70603-22fd-4dd1-8317-39f8c4213ffb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.233466] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ea2b43-7ea2-42dd-a3d3-daf373410f8a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.258364] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Successfully created port: 4e2bb2c8-5529-4001-9487-43fbe76c2658 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 525.261269] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575d620f-f3ca-4d1d-84b3-22e7a859dbb3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.277349] env[61273]: DEBUG nova.compute.provider_tree [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 525.277924] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 0bb2191c5b9b4c5ebe86162f2306c992 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 525.283488] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 986f91ac-e44f-474a-885a-bc097b396019 could not be found. 
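The records above show the vmwareapi destroy path tolerating a VM that was never created on the backend: SearchIndex.FindAllByUuid finds nothing for 986f91ac-e44f-474a-885a-bc097b396019, the InstanceNotFound is downgraded to a warning, the instance is marked destroyed, and cleanup moves on to network deallocation. Below is a minimal, self-contained sketch of that tolerant-destroy pattern; the helper names and the dict standing in for the vCenter inventory are hypothetical, not the actual nova.virt.vmwareapi.vmops API.

import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("vmops-sketch")


class InstanceNotFound(Exception):
    """Backend has no VM registered for the given UUID."""


# Hypothetical stand-in for the vCenter inventory that
# SearchIndex.FindAllByUuid would query in the real driver.
BACKEND_VMS = {}  # uuid -> opaque VM reference


def find_vm_by_uuid(uuid):
    try:
        return BACKEND_VMS[uuid]
    except KeyError:
        raise InstanceNotFound(f"Instance {uuid} could not be found.")


def destroy_instance(uuid):
    try:
        vm_ref = find_vm_by_uuid(uuid)
        BACKEND_VMS.pop(uuid, None)  # real code would unregister/destroy vm_ref
        LOG.debug("Destroyed backend VM %s", vm_ref)
    except InstanceNotFound as exc:
        # Spawn failed before a VM was ever created, so there is nothing to
        # tear down on the hypervisor; log a warning and carry on.
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.debug("Instance destroyed")  # network deallocation follows


destroy_instance("986f91ac-e44f-474a-885a-bc097b396019")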
[ 525.283727] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 525.283929] env[61273]: INFO nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Took 0.10 seconds to destroy the instance on the hypervisor. [ 525.284197] env[61273]: DEBUG oslo.service.loopingcall [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 525.286501] env[61273]: DEBUG nova.compute.manager [-] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 525.286617] env[61273]: DEBUG nova.network.neutron [-] [instance: 986f91ac-e44f-474a-885a-bc097b396019] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 525.288616] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bb2191c5b9b4c5ebe86162f2306c992 [ 525.310539] env[61273]: DEBUG nova.network.neutron [-] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 525.311664] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg df20e34aa294472d83c06346a3258969 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 525.320467] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df20e34aa294472d83c06346a3258969 [ 525.445242] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 315168f8c49a40049839f4af121bcc2d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 525.495994] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 315168f8c49a40049839f4af121bcc2d [ 525.540223] env[61273]: DEBUG nova.compute.manager [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Received event network-changed-d6dd597b-ff4a-4e48-b10a-96073004f3eb {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 525.540223] env[61273]: DEBUG nova.compute.manager [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Refreshing instance network info cache due to event network-changed-d6dd597b-ff4a-4e48-b10a-96073004f3eb. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 525.540223] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] Acquiring lock "refresh_cache-52eb3ce9-696c-445c-b82d-90663a9e8b21" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.540223] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] Acquired lock "refresh_cache-52eb3ce9-696c-445c-b82d-90663a9e8b21" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.540223] env[61273]: DEBUG nova.network.neutron [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Refreshing network info cache for port d6dd597b-ff4a-4e48-b10a-96073004f3eb {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 525.540720] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] Expecting reply to msg 16671b347d6149c0bd3f94924b3e8d61 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 525.551787] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16671b347d6149c0bd3f94924b3e8d61 [ 525.789114] env[61273]: DEBUG nova.scheduler.client.report [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 525.789114] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg eb89c2f282784b08a1eb494951eed6ac in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 525.803537] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb89c2f282784b08a1eb494951eed6ac [ 525.813759] env[61273]: DEBUG nova.network.neutron [-] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.813759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 183b80482450433aa38adf64c17ea661 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 525.822694] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 183b80482450433aa38adf64c17ea661 [ 525.948672] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Start spawning the instance on the 
hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 525.976686] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 525.976917] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 525.977092] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 525.977330] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 525.977426] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 525.977577] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 525.977776] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 525.978765] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 525.978765] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 525.978765] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 525.978765] env[61273]: DEBUG nova.virt.hardware [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 525.979228] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a24ab2-ea45-4126-8d9e-b39ec38ac305 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.987492] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784d9df9-f4c8-4105-b3ee-c81f87e29e25 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.082353] env[61273]: DEBUG nova.network.neutron [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 526.289235] env[61273]: DEBUG nova.network.neutron [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.289235] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] Expecting reply to msg 770a291d595348d4ba06e0aa65b381a7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 526.293543] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 526.293543] env[61273]: DEBUG nova.compute.manager [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 526.293543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 123a206889bf4fe8bd8b3fb43a2b0be2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 526.294546] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.820s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.299828] env[61273]: INFO nova.compute.claims [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 526.302138] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 335c08695480441ab5c87152b17e9be0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 526.304755] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 770a291d595348d4ba06e0aa65b381a7 [ 526.315840] env[61273]: INFO nova.compute.manager [-] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Took 1.03 seconds to deallocate network for instance. [ 526.319025] env[61273]: DEBUG nova.compute.claims [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 526.320025] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.346913] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 123a206889bf4fe8bd8b3fb43a2b0be2 [ 526.351287] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 335c08695480441ab5c87152b17e9be0 [ 526.791389] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a55ecf2-7662-4ed3-9b7f-4454ddcd0d3e req-20de6af9-6090-4330-af50-613e5a0444fc service nova] Releasing lock "refresh_cache-52eb3ce9-696c-445c-b82d-90663a9e8b21" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.797451] env[61273]: DEBUG nova.compute.utils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 526.797451] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 
tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg e0582ce87119468cabece15d4c93b645 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 526.797451] env[61273]: DEBUG nova.compute.manager [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Not allocating networking since 'none' was specified. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 526.807473] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg af6535290ce94681bae63dc1a5449d06 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 526.815772] env[61273]: ERROR nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658, please check neutron logs for more information. [ 526.815772] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 526.815772] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.815772] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 526.815772] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.815772] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 526.815772] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.815772] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 526.815772] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.815772] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 526.815772] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.815772] env[61273]: ERROR nova.compute.manager raise self.value [ 526.815772] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.815772] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 526.815772] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.815772] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 526.816334] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.816334] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 526.816334] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658, please check neutron logs for more information. 
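The traceback above bottoms out in _ensure_no_port_binding_failure(port) raising exception.PortBindingFailed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658: Neutron accepted the port update but could not bind it to the compute host, so Nova aborts network allocation and, a few records later, the spawn itself. A minimal sketch of that guard follows; it assumes (the log does not show this) that a failed binding is reported through the port's binding:vif_type field, and the exception class here is a local stand-in rather than nova.exception.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            "logs for more information.")


def _ensure_no_port_binding_failure(port):
    # After Neutron returns the updated port, check whether the backend
    # actually bound it; a vif_type of 'binding_failed' means it did not.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Illustrative port dict mirroring what the log reports for this instance.
port = {'id': '4e2bb2c8-5529-4001-9487-43fbe76c2658',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
_ensure_no_port_binding_failure(port)  # raises PortBindingFailed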
[ 526.816334] env[61273]: ERROR nova.compute.manager [ 526.816334] env[61273]: Traceback (most recent call last): [ 526.816334] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 526.816334] env[61273]: listener.cb(fileno) [ 526.816334] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.816334] env[61273]: result = function(*args, **kwargs) [ 526.816334] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 526.816334] env[61273]: return func(*args, **kwargs) [ 526.816334] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 526.816334] env[61273]: raise e [ 526.816334] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.816334] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 526.816334] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.816334] env[61273]: created_port_ids = self._update_ports_for_instance( [ 526.816334] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.816334] env[61273]: with excutils.save_and_reraise_exception(): [ 526.816334] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.816334] env[61273]: self.force_reraise() [ 526.816334] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.816334] env[61273]: raise self.value [ 526.816334] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.816334] env[61273]: updated_port = self._update_port( [ 526.816334] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.816334] env[61273]: _ensure_no_port_binding_failure(port) [ 526.816334] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.816334] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 526.817140] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658, please check neutron logs for more information. [ 526.817140] env[61273]: Removing descriptor: 15 [ 526.817140] env[61273]: ERROR nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658, please check neutron logs for more information. 
[ 526.817140] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Traceback (most recent call last): [ 526.817140] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 526.817140] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] yield resources [ 526.817140] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 526.817140] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self.driver.spawn(context, instance, image_meta, [ 526.817140] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 526.817140] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self._vmops.spawn(context, instance, image_meta, injected_files, [ 526.817140] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 526.817140] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] vm_ref = self.build_virtual_machine(instance, [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] vif_infos = vmwarevif.get_vif_info(self._session, [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] for vif in network_info: [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] return self._sync_wrapper(fn, *args, **kwargs) [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self.wait() [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self[:] = self._gt.wait() [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] return self._exit_event.wait() [ 526.817497] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 526.817872] env[61273]: ERROR 
nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] result = hub.switch() [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] return self.greenlet.switch() [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] result = function(*args, **kwargs) [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] return func(*args, **kwargs) [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] raise e [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] nwinfo = self.network_api.allocate_for_instance( [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.817872] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] created_port_ids = self._update_ports_for_instance( [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] with excutils.save_and_reraise_exception(): [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self.force_reraise() [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] raise self.value [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] updated_port = self._update_port( [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.818233] 
env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] _ensure_no_port_binding_failure(port) [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.818233] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] raise exception.PortBindingFailed(port_id=port['id']) [ 526.818570] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] nova.exception.PortBindingFailed: Binding failed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658, please check neutron logs for more information. [ 526.818570] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] [ 526.818570] env[61273]: INFO nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Terminating instance [ 526.820592] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Acquiring lock "refresh_cache-020a5b3a-bda7-4a8a-9dad-948cee5a7373" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.821163] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Acquired lock "refresh_cache-020a5b3a-bda7-4a8a-9dad-948cee5a7373" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 526.821388] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 526.821885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg aa0bbd74c29848fc8972c774b203de6a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 526.822984] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af6535290ce94681bae63dc1a5449d06 [ 526.830424] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0582ce87119468cabece15d4c93b645 [ 526.835388] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa0bbd74c29848fc8972c774b203de6a [ 527.299725] env[61273]: DEBUG nova.compute.manager [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 527.301466] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 097e194c31e14e16a29970854cbb7d09 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 527.354308] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 097e194c31e14e16a29970854cbb7d09 [ 527.363932] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 527.497724] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c886459-ee61-4d18-bbce-973f9385b47f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.498305] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.498465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 7a0f53c05f2c4257a786b2d8deaea144 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 527.507512] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e92d5a-e659-4329-b71b-ede11c516d25 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.543762] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a0f53c05f2c4257a786b2d8deaea144 [ 527.544748] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b306216-b601-4d06-aef1-bfeab9190151 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.553281] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecc9e5d-3887-47cd-8cea-0a13993692f4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.569426] env[61273]: DEBUG nova.compute.provider_tree [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 527.569950] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 92f49186816a4b1ea78bbe73a786e708 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 527.580225] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92f49186816a4b1ea78bbe73a786e708 [ 527.617211] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "228821ca-e981-405b-8952-8a1718103d3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.617476] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "228821ca-e981-405b-8952-8a1718103d3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.617969] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 65a18e0de89c44548c68cc845226a049 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 527.642442] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65a18e0de89c44548c68cc845226a049 [ 527.805835] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg b02a2b1c11b64759910d71d860d3a4e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 527.852985] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b02a2b1c11b64759910d71d860d3a4e6 [ 528.005424] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Releasing lock "refresh_cache-020a5b3a-bda7-4a8a-9dad-948cee5a7373" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.005424] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 528.005424] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 528.005424] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa488222-7c9d-411d-ac11-12f85a1a7154 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.011401] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11620b1-b42f-4bc9-b941-10edf17962d9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.040453] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 020a5b3a-bda7-4a8a-9dad-948cee5a7373 could not be found. [ 528.040453] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 528.040453] env[61273]: INFO nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Took 0.04 seconds to destroy the instance on the hypervisor. [ 528.040453] env[61273]: DEBUG oslo.service.loopingcall [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 528.040453] env[61273]: DEBUG nova.compute.manager [-] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 528.040453] env[61273]: DEBUG nova.network.neutron [-] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 528.066262] env[61273]: DEBUG nova.network.neutron [-] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.066798] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b0e809b0550f4119890e39dd5b491bed in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 528.072648] env[61273]: DEBUG nova.scheduler.client.report [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 528.074952] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 61d492a35a7240afa2f98c69f120b6e7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 528.081158] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0e809b0550f4119890e39dd5b491bed [ 528.088376] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61d492a35a7240afa2f98c69f120b6e7 [ 528.122894] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 528.124620] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 05678f05787949d0a965f76c6ba54d8f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 528.197721] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05678f05787949d0a965f76c6ba54d8f [ 528.309325] env[61273]: DEBUG nova.compute.manager [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 528.337379] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 528.337379] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 528.337379] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 528.337379] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 528.337661] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 528.337661] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 528.337661] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 528.337661] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 528.337661] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a 
tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 528.337814] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 528.337814] env[61273]: DEBUG nova.virt.hardware [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 528.337814] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee57612-cb66-4ab8-b465-ca52b704fb3e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.345739] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3646a1e8-3acb-49da-981f-82d02278a852 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.366151] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 528.378931] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 528.381217] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72aa8522-6b9a-45ad-ac55-98be0cdad444 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.385183] env[61273]: DEBUG nova.compute.manager [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Received event network-vif-deleted-d6dd597b-ff4a-4e48-b10a-96073004f3eb {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 528.385584] env[61273]: DEBUG nova.compute.manager [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Received event network-changed-397126f4-dc7b-428e-9725-ba5c0ded1af3 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 528.386637] env[61273]: DEBUG nova.compute.manager [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Refreshing instance network info cache due to event network-changed-397126f4-dc7b-428e-9725-ba5c0ded1af3. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 528.386968] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Acquiring lock "refresh_cache-986f91ac-e44f-474a-885a-bc097b396019" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.387210] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Acquired lock "refresh_cache-986f91ac-e44f-474a-885a-bc097b396019" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.387592] env[61273]: DEBUG nova.network.neutron [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Refreshing network info cache for port 397126f4-dc7b-428e-9725-ba5c0ded1af3 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 528.388320] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Expecting reply to msg 48e8f9a4476149b7850eaf65dbe3cc2b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 528.395345] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48e8f9a4476149b7850eaf65dbe3cc2b [ 528.400137] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Created folder: OpenStack in parent group-v4. [ 528.400473] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Creating folder: Project (e50b6ecfe84a44d6821b987722cfa474). Parent ref: group-v103328. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 528.401181] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-269888fb-6099-4ea4-95ce-e31a58c5a3ce {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.412262] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Created folder: Project (e50b6ecfe84a44d6821b987722cfa474) in parent group-v103328. [ 528.412899] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Creating folder: Instances. Parent ref: group-v103329. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 528.413348] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b0d8d14-83d3-4695-bf40-4966be4c2a97 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.423621] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Created folder: Instances in parent group-v103329. 
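Aside: the Folder.CreateFolder and CreateVM_Task invocations recorded above follow oslo.vmware's generic calling pattern, invoke_api() for synchronous SOAP calls and wait_for_task() for asynchronous vCenter tasks. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession; the function names, folder names, and managed-object references below are placeholders, not values taken from this log:

    # Illustrative sketch only; it mirrors the call pattern behind the
    # "Invoking Folder.CreateFolder ..." and "Waiting for the task ..." entries above.

    def create_child_folder(session, parent_folder_ref, name):
        # CreateFolder is synchronous: it returns the managed object
        # reference of the newly created folder.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_folder_ref, name=name)

    def create_vm(session, vm_folder_ref, config_spec, res_pool_ref):
        # CreateVM_Task is asynchronous: wait_for_task() polls the task until
        # it finishes, which is what the "progress is 0% ... 100%" entries show.
        task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        return session.wait_for_task(task)
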
[ 528.424056] env[61273]: DEBUG oslo.service.loopingcall [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 528.424357] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 528.425089] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c58a6ad-3bec-4290-aed6-45cc3bef9dab {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.443132] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 528.443132] env[61273]: value = "task-375216" [ 528.443132] env[61273]: _type = "Task" [ 528.443132] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.452596] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375216, 'name': CreateVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.573529] env[61273]: DEBUG nova.network.neutron [-] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.573529] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 35c2d35504b44f3cb2d6f9adeab36a10 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 528.581474] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.285s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 528.581474] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 528.581474] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg b6c5c8f3a60d4161ab7cea7d306d2b06 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 528.582509] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35c2d35504b44f3cb2d6f9adeab36a10 [ 528.583225] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.065s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.585187] env[61273]: INFO nova.compute.claims [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 528.586650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 1d19e0f3bc88472cba13715d17391bcd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 528.644717] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.650005] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6c5c8f3a60d4161ab7cea7d306d2b06 [ 528.660390] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d19e0f3bc88472cba13715d17391bcd [ 528.912986] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquiring lock "62c3b24d-bee7-4dd2-a6c7-9303c7c28cca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.912986] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "62c3b24d-bee7-4dd2-a6c7-9303c7c28cca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.955489] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375216, 'name': CreateVM_Task, 'duration_secs': 0.329736} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.956471] env[61273]: DEBUG nova.network.neutron [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.958501] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 528.960401] env[61273]: DEBUG oslo_vmware.service [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdb1ec1-16ab-4f79-a385-7ce057044541 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.968457] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.968823] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.970193] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 528.970555] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0596d3d-21d2-4c1a-8027-2162240b1b51 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.975498] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 528.975498] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]526025a4-83b8-0ae9-3c66-cc8b7a642fad" [ 528.975498] env[61273]: _type = "Task" [ 528.975498] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.985070] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]526025a4-83b8-0ae9-3c66-cc8b7a642fad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.083976] env[61273]: INFO nova.compute.manager [-] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Took 1.05 seconds to deallocate network for instance. [ 529.087094] env[61273]: DEBUG nova.compute.claims [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 529.087357] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.090606] env[61273]: DEBUG nova.compute.utils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 529.096096] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 52e86030a6e248889882a58ae34d634b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 529.096096] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 1ceef263515741c19d21039ed4d74d95 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 529.096096] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 529.096096] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 529.099397] env[61273]: DEBUG nova.network.neutron [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.099980] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Expecting reply to msg a1fc74c6eb1147d1b31de474b52be5bd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 529.114307] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52e86030a6e248889882a58ae34d634b [ 529.118495] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1fc74c6eb1147d1b31de474b52be5bd [ 529.119064] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ceef263515741c19d21039ed4d74d95 [ 529.240654] env[61273]: DEBUG nova.policy [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af469e3bdb504baeaa90377cd88815cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9562b8ed4a894a4da10f11866133a628', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 529.486737] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.486983] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 529.487223] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.487364] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a 
tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.487880] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 529.488016] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ac43092-3b64-438e-9e82-b44a96a2feb0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.505632] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 529.505814] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 529.506597] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc310a58-8d2a-497c-bd86-9b1a2e5468a6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.513601] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c15e877-951b-4fa2-9908-db8fedecb1fc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.532888] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 529.532888] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]5271995c-f60d-944c-3e5c-fdda561999e7" [ 529.532888] env[61273]: _type = "Task" [ 529.532888] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.542098] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Preparing fetch location {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 529.542098] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Creating directory with path [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 529.542098] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d96c81bd-0a97-4cad-8884-42c1c7c40901 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.563416] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Created directory with path [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 529.563709] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Fetch image to [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 529.563907] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Downloading image file data 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b to [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk on the data store datastore1 {{(pid=61273) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 529.564836] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a79056-1931-4aa3-b53c-2977fc142079 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.596347] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa4a76b-0f2c-4ad6-9e0e-9db410567f87 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.608142] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 529.609852] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 543b9d9fcd6447e8b0ac84a31f031847 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 529.611498] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Releasing lock "refresh_cache-986f91ac-e44f-474a-885a-bc097b396019" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.611752] env[61273]: DEBUG nova.compute.manager [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Received event network-vif-deleted-397126f4-dc7b-428e-9725-ba5c0ded1af3 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 529.611933] env[61273]: DEBUG nova.compute.manager [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Received event network-changed-4e2bb2c8-5529-4001-9487-43fbe76c2658 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 529.612147] env[61273]: DEBUG nova.compute.manager [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Refreshing instance network info cache due to event network-changed-4e2bb2c8-5529-4001-9487-43fbe76c2658. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 529.612296] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Acquiring lock "refresh_cache-020a5b3a-bda7-4a8a-9dad-948cee5a7373" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.613309] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Acquired lock "refresh_cache-020a5b3a-bda7-4a8a-9dad-948cee5a7373" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.613525] env[61273]: DEBUG nova.network.neutron [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Refreshing network info cache for port 4e2bb2c8-5529-4001-9487-43fbe76c2658 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 529.614661] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Expecting reply to msg 7ee85f85bbda441281b5ce303f5690a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 529.622680] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1585974-989e-430d-96f4-62106c2fcc7d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.666742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ee85f85bbda441281b5ce303f5690a4 [ 529.667929] env[61273]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60befa35-5b4d-4839-b43b-bfe1c9df1007 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.678531] env[61273]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5f32b80a-19a4-49f6-8b85-e72ee012e53e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.716814] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 543b9d9fcd6447e8b0ac84a31f031847 [ 529.777935] env[61273]: DEBUG nova.virt.vmwareapi.images [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Downloading image file data 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b to the data store datastore1 {{(pid=61273) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 529.879804] env[61273]: DEBUG oslo_vmware.rw_handles [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61273) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 529.968567] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Acquiring lock "1d64d913-45f0-4768-8375-7863d9ae43c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.968886] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Lock "1d64d913-45f0-4768-8375-7863d9ae43c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.115903] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 216b6e79c4ee43518eaa9d319320f117 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 530.125211] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1049e9b6-87eb-4608-a527-b7db66b2bd73 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.141026] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f735d2db-191e-44c0-ad89-645966c55f6a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.190996] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 216b6e79c4ee43518eaa9d319320f117 [ 530.192190] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1b0bfe-b4a2-4a83-be3d-1c288e31cc82 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.210983] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d76821-f3a5-4003-b268-bf52ed270eb0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.220635] env[61273]: DEBUG nova.network.neutron [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.233615] env[61273]: DEBUG nova.compute.provider_tree [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.234235] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg c6102a9ac9cc4373a4e695d58f5510ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 530.243779] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6102a9ac9cc4373a4e695d58f5510ba [ 530.262251] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Successfully created port: 86deca19-3809-4ccf-a3db-0d79d610d6c5 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 530.535638] env[61273]: DEBUG nova.network.neutron [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.536191] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Expecting reply to msg 5ae998ab98db4af193956d21d3010a6f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 530.549529] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ae998ab98db4af193956d21d3010a6f [ 530.557158] env[61273]: DEBUG oslo_vmware.rw_handles [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Completed reading data from the image iterator. 
{{(pid=61273) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 530.557362] env[61273]: DEBUG oslo_vmware.rw_handles [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61273) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 530.626860] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 530.655232] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 530.655470] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 530.655635] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 530.655809] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 530.655945] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 530.656115] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 530.656316] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 530.656468] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 530.656624] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 530.656775] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 530.656940] env[61273]: DEBUG nova.virt.hardware [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 530.658314] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21174cfc-e22f-4c5f-a464-61395a3580bd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.668694] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ffd92f-a38d-4182-986b-09fe98afe174 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.712039] env[61273]: DEBUG nova.virt.vmwareapi.images [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Downloaded image file data 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b to vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk on the data store datastore1 {{(pid=61273) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 530.713662] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Caching image {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 530.713901] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk to [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 530.714167] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d8c2965-380f-48ef-82d7-ef18aa03789a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.721472] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 530.721472] env[61273]: value = "task-375217" [ 530.721472] env[61273]: _type = "Task" [ 530.721472] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.734391] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.742150] env[61273]: DEBUG nova.scheduler.client.report [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 530.744529] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 4e892235271c4b119bb9cb82ca653001 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 530.760972] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e892235271c4b119bb9cb82ca653001 [ 531.047123] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] Releasing lock "refresh_cache-020a5b3a-bda7-4a8a-9dad-948cee5a7373" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.047123] env[61273]: DEBUG nova.compute.manager [req-6a1e72ab-512b-412e-bf5a-b3c81b17adf1 req-474526b3-584a-4418-a48e-72c7ffc5d5a8 service nova] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Received event network-vif-deleted-4e2bb2c8-5529-4001-9487-43fbe76c2658 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 531.231999] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 
tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375217, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.247873] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.248439] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 531.250209] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 57cf4ded298e416aa12d6007112dc3a7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 531.251327] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.775s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.252903] env[61273]: INFO nova.compute.claims [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 531.254673] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 54176dc96997432c8b1217af668bc7ec in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 531.296097] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57cf4ded298e416aa12d6007112dc3a7 [ 531.298555] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54176dc96997432c8b1217af668bc7ec [ 531.733449] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375217, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704035} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.733793] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Copied Virtual Disk [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk to [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 531.734023] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleting the datastore file [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 531.734299] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf4a83aa-d135-4bfc-a3da-b1df1e3fe518 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.740708] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 531.740708] env[61273]: value = "task-375218" [ 531.740708] env[61273]: _type = "Task" [ 531.740708] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.748422] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375218, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.757962] env[61273]: DEBUG nova.compute.utils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 531.758956] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 21d383ce398b45e489e82d0d5167703e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 531.759626] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 531.762985] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 531.763689] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg f537ebcbc218431e94b94000f49c8415 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 531.783391] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f537ebcbc218431e94b94000f49c8415 [ 531.783391] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21d383ce398b45e489e82d0d5167703e [ 531.887129] env[61273]: DEBUG nova.policy [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3159b2750904f0caf5cbd42cf43a8c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67c0ae43fbdd470dae9b8035b1c8022e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 532.250460] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375218, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025835} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.250703] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 532.250913] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Moving file from [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b to [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b. 
{{(pid=61273) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 532.251165] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-abc182b0-96da-4939-a102-78ced0e36c7b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.257757] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 532.257757] env[61273]: value = "task-375219" [ 532.257757] env[61273]: _type = "Task" [ 532.257757] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.266074] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 532.267885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 37333740f67c472ebc156b4d08f85ab1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 532.272136] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375219, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.319896] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37333740f67c472ebc156b4d08f85ab1 [ 532.488947] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4447245c-dc45-437a-be89-4ea9de2d1fd5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.502801] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a30b6e-07e2-43e3-b568-d9b171fde6c7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.536584] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 532.537126] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 532.538757] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Starting heal instance info cache {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 532.538757] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Rebuilding the list of instances to heal {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 532.549331] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg bd456ce9db1f41b49088f691994e53b8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 532.550844] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f953e798-53c0-4939-bed7-f532c028ab17 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.562830] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0af5b9-b3f6-40af-b006-1d44f46f48a5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.578830] env[61273]: DEBUG nova.compute.provider_tree [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.579446] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 6490f71c7e7d46b6a1674f64d6f0f0b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 532.583649] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: 
ea496eae-68f9-43f1-b4cf-6743043c753b] Successfully created port: 2a66073a-39ef-4960-8443-ca3964c62be0 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 532.584728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd456ce9db1f41b49088f691994e53b8 [ 532.592400] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6490f71c7e7d46b6a1674f64d6f0f0b0 [ 532.768031] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375219, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.0281} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.768496] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] File moved {{(pid=61273) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 532.769542] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Cleaning up location [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 532.769542] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleting the datastore file [datastore1] vmware_temp/d74623c3-f3ac-4e7b-a4c7-9b6f6aa2716f {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 532.769542] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf9492e3-383c-47c8-b3e9-1001feeb2851 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.776628] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg c130c914d56a4dd2866972f46247540e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 532.779568] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 532.779568] env[61273]: value = "task-375220" [ 532.779568] env[61273]: _type = "Task" [ 532.779568] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.790567] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375220, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.841639] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c130c914d56a4dd2866972f46247540e [ 533.056427] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 533.056523] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 533.056645] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 533.056801] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 533.056920] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 533.057037] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 533.057151] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 533.057268] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Didn't find any instances for network info cache update. 
{{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 533.057555] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.057682] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.057855] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.058028] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.058197] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.058367] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._sync_power_states {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.059021] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f68f4353e9e94f5293894f147ff4b48a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 533.078561] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f68f4353e9e94f5293894f147ff4b48a [ 533.086603] env[61273]: DEBUG nova.scheduler.client.report [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 533.088905] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 3458dff70e0249c49908ff0ea35be9df in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 533.107066] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3458dff70e0249c49908ff0ea35be9df [ 533.282844] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae 
tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 533.294738] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02602} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.294738] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 533.295475] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cbe0cd4-0ef5-4ad9-a7b1-5e06bff3f6b5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.300727] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 533.300727] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52be5b54-3d31-7c30-fb2d-3af495aa0fe8" [ 533.300727] env[61273]: _type = "Task" [ 533.300727] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.316082] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 533.316082] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 533.316082] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Image limits 0:0:0 {{(pid=61273) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 533.316295] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 533.316295] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 533.316295] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 533.316295] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 533.316295] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 533.316433] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 533.316433] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 533.316433] env[61273]: DEBUG nova.virt.hardware [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 533.316433] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa157df-8dde-4636-bc20-7c871ba3a065 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.330178] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': 
session[527e49f3-ce92-061c-8407-29c8a2392124]52be5b54-3d31-7c30-fb2d-3af495aa0fe8, 'name': SearchDatastore_Task, 'duration_secs': 0.008833} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.331502] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.331796] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 533.332128] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52689f6e-14d4-4159-aaf9-ee9bda4ad504 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.335211] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2318b2e-a8a6-4cd5-8337-6dbb22de115a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.339989] env[61273]: ERROR nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 86deca19-3809-4ccf-a3db-0d79d610d6c5, please check neutron logs for more information. 
[ 533.339989] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 533.339989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.339989] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 533.339989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.339989] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 533.339989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.339989] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 533.339989] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.339989] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 533.339989] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.339989] env[61273]: ERROR nova.compute.manager raise self.value [ 533.339989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.339989] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 533.339989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.339989] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 533.340519] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.340519] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 533.340519] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 86deca19-3809-4ccf-a3db-0d79d610d6c5, please check neutron logs for more information. 
[ 533.340519] env[61273]: ERROR nova.compute.manager [ 533.340519] env[61273]: Traceback (most recent call last): [ 533.340519] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 533.340519] env[61273]: listener.cb(fileno) [ 533.340519] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.340519] env[61273]: result = function(*args, **kwargs) [ 533.340519] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.340519] env[61273]: return func(*args, **kwargs) [ 533.340519] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.340519] env[61273]: raise e [ 533.340519] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.340519] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 533.340519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.340519] env[61273]: created_port_ids = self._update_ports_for_instance( [ 533.340519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.340519] env[61273]: with excutils.save_and_reraise_exception(): [ 533.340519] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.340519] env[61273]: self.force_reraise() [ 533.340519] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.340519] env[61273]: raise self.value [ 533.340519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.340519] env[61273]: updated_port = self._update_port( [ 533.340519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.340519] env[61273]: _ensure_no_port_binding_failure(port) [ 533.340519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.340519] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 533.341350] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 86deca19-3809-4ccf-a3db-0d79d610d6c5, please check neutron logs for more information. [ 533.341350] env[61273]: Removing descriptor: 16 [ 533.341350] env[61273]: ERROR nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 86deca19-3809-4ccf-a3db-0d79d610d6c5, please check neutron logs for more information. 
[ 533.341350] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Traceback (most recent call last): [ 533.341350] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 533.341350] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] yield resources [ 533.341350] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 533.341350] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self.driver.spawn(context, instance, image_meta, [ 533.341350] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 533.341350] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 533.341350] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 533.341350] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] vm_ref = self.build_virtual_machine(instance, [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] vif_infos = vmwarevif.get_vif_info(self._session, [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] for vif in network_info: [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] return self._sync_wrapper(fn, *args, **kwargs) [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self.wait() [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self[:] = self._gt.wait() [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] return self._exit_event.wait() [ 533.341688] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 533.342031] env[61273]: ERROR 
nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] result = hub.switch() [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] return self.greenlet.switch() [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] result = function(*args, **kwargs) [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] return func(*args, **kwargs) [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] raise e [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] nwinfo = self.network_api.allocate_for_instance( [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.342031] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] created_port_ids = self._update_ports_for_instance( [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] with excutils.save_and_reraise_exception(): [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self.force_reraise() [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] raise self.value [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] updated_port = self._update_port( [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.342366] 
env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] _ensure_no_port_binding_failure(port) [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.342366] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] raise exception.PortBindingFailed(port_id=port['id']) [ 533.342766] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] nova.exception.PortBindingFailed: Binding failed for port 86deca19-3809-4ccf-a3db-0d79d610d6c5, please check neutron logs for more information. [ 533.342766] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] [ 533.342766] env[61273]: INFO nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Terminating instance [ 533.343704] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Acquiring lock "refresh_cache-8f2cba43-bdec-4455-b795-784b29e2ea5d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.343980] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Acquired lock "refresh_cache-8f2cba43-bdec-4455-b795-784b29e2ea5d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.344105] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 533.344441] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg e9a2d3fc2f4f4e7c9880c78ddf95a556 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 533.355295] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9a2d3fc2f4f4e7c9880c78ddf95a556 [ 533.358020] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 533.358020] env[61273]: value = "task-375221" [ 533.358020] env[61273]: _type = "Task" [ 533.358020] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.367837] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375221, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.389511] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 533.572590] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Getting list of instances from cluster (obj){ [ 533.572590] env[61273]: value = "domain-c8" [ 533.572590] env[61273]: _type = "ClusterComputeResource" [ 533.572590] env[61273]: } {{(pid=61273) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 533.572590] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b496b7-319a-40c8-9235-613d4ab797e9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.581767] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Got total of 1 instances {{(pid=61273) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 533.581767] env[61273]: WARNING nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] While synchronizing instance power states, found 7 instances in the database and 1 instances on the hypervisor. [ 533.581767] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Triggering sync for uuid 52eb3ce9-696c-445c-b82d-90663a9e8b21 {{(pid=61273) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 533.581767] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Triggering sync for uuid 986f91ac-e44f-474a-885a-bc097b396019 {{(pid=61273) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 533.581767] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Triggering sync for uuid 020a5b3a-bda7-4a8a-9dad-948cee5a7373 {{(pid=61273) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 533.581767] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Triggering sync for uuid 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2 {{(pid=61273) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 533.581767] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Triggering sync for uuid 8f2cba43-bdec-4455-b795-784b29e2ea5d {{(pid=61273) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 533.582379] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Triggering sync for uuid ea496eae-68f9-43f1-b4cf-6743043c753b {{(pid=61273) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 533.582379] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Triggering sync for uuid f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5 {{(pid=61273) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 533.582437] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "52eb3ce9-696c-445c-b82d-90663a9e8b21" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.582738] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "986f91ac-e44f-474a-885a-bc097b396019" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.582946] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "020a5b3a-bda7-4a8a-9dad-948cee5a7373" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.583625] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.583897] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "8f2cba43-bdec-4455-b795-784b29e2ea5d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.584162] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "ea496eae-68f9-43f1-b4cf-6743043c753b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.584405] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.584589] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.584806] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61273) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 533.584958] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager.update_available_resource {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.585311] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg cf60bdb075fd4674bc453a087db6b7b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 533.595227] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf60bdb075fd4674bc453a087db6b7b1 [ 533.597596] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.598104] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 533.599816] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 4142b54b4d1e4f30bbc2720d05bb6ace in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 533.600838] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.629s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.602626] env[61273]: INFO nova.compute.claims [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 533.604504] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 2c6ae935865c436b9cd46df17e8b5bf4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 533.611919] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.612650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 
2adc69df567a49b4822c299f2e71e893 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 533.635263] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2adc69df567a49b4822c299f2e71e893 [ 533.668673] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c6ae935865c436b9cd46df17e8b5bf4 [ 533.686193] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4142b54b4d1e4f30bbc2720d05bb6ace [ 533.868982] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375221, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505948} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.869396] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 533.869788] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 533.870146] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1537671-5233-4c38-89bb-c5f30dc06519 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.878987] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 533.878987] env[61273]: value = "task-375222" [ 533.878987] env[61273]: _type = "Task" [ 533.878987] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.888637] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375222, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.946620] env[61273]: DEBUG nova.compute.manager [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Received event network-changed-86deca19-3809-4ccf-a3db-0d79d610d6c5 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 533.946817] env[61273]: DEBUG nova.compute.manager [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Refreshing instance network info cache due to event network-changed-86deca19-3809-4ccf-a3db-0d79d610d6c5. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 533.947002] env[61273]: DEBUG oslo_concurrency.lockutils [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] Acquiring lock "refresh_cache-8f2cba43-bdec-4455-b795-784b29e2ea5d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.087850] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.112362] env[61273]: DEBUG nova.compute.utils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 534.112362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 62f1dd145db74f3d87ac2927f9bac0a7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 534.112362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg f4c88e632e3a4e58af5aaad3bea0a66f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 534.112362] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 534.112362] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 534.118335] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Releasing lock "refresh_cache-8f2cba43-bdec-4455-b795-784b29e2ea5d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.122210] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 534.122491] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 534.123606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4c88e632e3a4e58af5aaad3bea0a66f [ 534.123910] env[61273]: DEBUG oslo_concurrency.lockutils [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] Acquired lock "refresh_cache-8f2cba43-bdec-4455-b795-784b29e2ea5d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.125776] env[61273]: DEBUG nova.network.neutron [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Refreshing network info cache for port 86deca19-3809-4ccf-a3db-0d79d610d6c5 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 534.126573] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] Expecting reply to msg 55916c18743140a8ad57a92a4bcddc42 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 534.127358] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b98d6ef-3063-4ada-8d7e-a25c32e8f939 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.136296] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62f1dd145db74f3d87ac2927f9bac0a7 [ 534.138121] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55916c18743140a8ad57a92a4bcddc42 [ 534.147225] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3eecfd6-a644-4d70-8449-3fb8b0488b7d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.172471] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8f2cba43-bdec-4455-b795-784b29e2ea5d could not be found. [ 534.172755] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 534.172946] env[61273]: INFO nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Took 0.05 seconds to destroy the instance on the hypervisor. 
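[editor's note] The destroy sequence for instance 8f2cba43 above shows vmops logging InstanceNotFound as a WARNING and carrying on ("Instance destroyed", then network deallocation), so a delete request still completes even when the VM never materialised on the backend. A minimal, self-contained sketch of that tolerant-destroy pattern follows; the exception class and helper names are hypothetical stand-ins, not Nova's actual API.

```python
# Illustrative sketch of a destroy path that tolerates a VM missing on the
# backend, mirroring the WARNING -> "Instance destroyed" -> deallocate flow
# seen in the log above. All names below are hypothetical.
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Raised by the (hypothetical) backend when the VM does not exist."""


class FakeBackend:
    def lookup_vm(self, instance_id):
        # Simulate a VM that was never created on the hypervisor.
        raise InstanceNotFound(f"Instance {instance_id} could not be found.")

    def destroy_vm(self, vm_ref):
        pass  # never reached in this example


def destroy_instance(backend, instance_id):
    """Destroy the backend VM; a missing VM is a warning, not an error."""
    try:
        vm_ref = backend.lookup_vm(instance_id)
        backend.destroy_vm(vm_ref)
    except InstanceNotFound as exc:
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.debug("Instance destroyed")
    # Network deallocation still runs so Neutron ports are not leaked.
    deallocate_network(instance_id)


def deallocate_network(instance_id):
    LOG.debug("Deallocating network for instance %s", instance_id)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    destroy_instance(FakeBackend(), "8f2cba43-bdec-4455-b795-784b29e2ea5d")
```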
[ 534.173191] env[61273]: DEBUG oslo.service.loopingcall [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 534.173763] env[61273]: DEBUG nova.compute.manager [-] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 534.173817] env[61273]: DEBUG nova.network.neutron [-] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 534.297419] env[61273]: DEBUG nova.policy [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cd76e5588fa4d49bd07b2276708cbb9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e2cc0c992514f71a9718faad2e13951', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 534.358354] env[61273]: DEBUG nova.network.neutron [-] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.358791] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a0a1b95542ed43d083d1e3f3ee7f3f63 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 534.366420] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0a1b95542ed43d083d1e3f3ee7f3f63 [ 534.389155] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375222, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16568} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.389710] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 534.390916] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f49f8e-265d-4541-b8d7-d82fa9088228 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.419458] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 534.419864] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-231e901c-0c1b-4890-ac76-a047ed8fb767 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.442888] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 534.442888] env[61273]: value = "task-375223" [ 534.442888] env[61273]: _type = "Task" [ 534.442888] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.452182] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375223, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.622176] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 534.624253] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg ee93489d1d8b4bb585d258fd58517780 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 534.691979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee93489d1d8b4bb585d258fd58517780 [ 534.726165] env[61273]: DEBUG nova.network.neutron [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.861372] env[61273]: DEBUG nova.network.neutron [-] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.861894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3bd3a7eeed8f40bf9fd12eb0d9800af1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 534.879259] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3914072d-12d9-4103-841d-a50454ad4124 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.882323] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bd3a7eeed8f40bf9fd12eb0d9800af1 [ 534.895649] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332b634f-04d5-44f4-b2ef-480ef8648104 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.939962] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e209b46-1d59-4e03-9ba8-ff008c896704 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.951333] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4a93f8-2122-452e-9d76-7a2fbb733f84 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.967365] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375223, 'name': ReconfigVM_Task, 'duration_secs': 0.332716} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.977651] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 534.979702] env[61273]: DEBUG nova.compute.provider_tree [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.980225] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 9b97ddaa7cd04fe7b298a1749a17e482 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 534.981274] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3292634-6572-4a3d-9236-bc8de2bb20a1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.996204] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 534.996204] env[61273]: value = "task-375224" [ 534.996204] env[61273]: _type = "Task" [ 534.996204] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.004285] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b97ddaa7cd04fe7b298a1749a17e482 [ 535.014444] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375224, 'name': Rename_Task} progress is 14%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.110357] env[61273]: DEBUG nova.network.neutron [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.110875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] Expecting reply to msg f0fe4656dd70410986595e62255e728b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 535.124689] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0fe4656dd70410986595e62255e728b [ 535.134390] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 23050e096b7240b3a5367e17a197641b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 535.186582] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23050e096b7240b3a5367e17a197641b [ 535.267563] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Acquiring lock "f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.267793] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Lock "f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.364848] env[61273]: INFO nova.compute.manager [-] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Took 1.19 seconds to deallocate network for instance. 
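[editor's note] The ExtendVirtualDisk_Task, ReconfigVM_Task and Rename_Task entries above come from a loop that repeatedly reads task state and progress until the task reaches a terminal state, then reports the duration. The sketch below is a simplified stand-alone polling loop modelled on those log lines; it is not oslo.vmware's implementation, and the TaskInfo type, callback and poll interval are made up for illustration.

```python
# Simplified poll loop for a long-running backend task, modelled on the
# "progress is N% ... completed successfully" lines above. Hypothetical types.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str       # "queued", "running", "success", or "error"
    progress: int    # 0-100
    error: str = ""


def wait_for_task(read_task_info, task_id, poll_interval=0.5):
    """Poll task info until it succeeds; raise if it reports an error."""
    start = time.monotonic()
    while True:
        info = read_task_info(task_id)
        if info.state == "success":
            return time.monotonic() - start        # the log's duration_secs
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Fake task that completes after three polls.
    states = iter([TaskInfo("running", 0), TaskInfo("running", 14),
                   TaskInfo("success", 100)])
    duration = wait_for_task(lambda _tid: next(states), "task-375222",
                             poll_interval=0.01)
    print(f"completed successfully in {duration:.3f}s")
```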
[ 535.367325] env[61273]: DEBUG nova.compute.claims [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 535.367564] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.485648] env[61273]: DEBUG nova.scheduler.client.report [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.487981] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 41e3998ad39d43e2a2a05de517b65524 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 535.499158] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41e3998ad39d43e2a2a05de517b65524 [ 535.509297] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375224, 'name': Rename_Task} progress is 99%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.543216] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Successfully created port: caf04a4d-9a60-4997-9d4d-89dfc0470281 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 535.619658] env[61273]: DEBUG oslo_concurrency.lockutils [req-8dff3dd1-4d82-44f2-958b-21b4f25cdacd req-3b773b64-a407-46ef-a62e-a0e444f8942c service nova] Releasing lock "refresh_cache-8f2cba43-bdec-4455-b795-784b29e2ea5d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.635877] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 535.687220] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 535.687220] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 535.687220] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 535.687220] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 535.687426] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 535.687426] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 535.687426] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 535.687426] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 535.687591] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc 
tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 535.687781] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 535.687888] env[61273]: DEBUG nova.virt.hardware [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 535.688972] env[61273]: ERROR nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2a66073a-39ef-4960-8443-ca3964c62be0, please check neutron logs for more information. [ 535.688972] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 535.688972] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 535.688972] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 535.688972] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 535.688972] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 535.688972] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 535.688972] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 535.688972] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 535.688972] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 535.688972] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 535.688972] env[61273]: ERROR nova.compute.manager raise self.value [ 535.688972] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 535.688972] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 535.688972] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 535.688972] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 535.689480] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 535.689480] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 535.689480] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2a66073a-39ef-4960-8443-ca3964c62be0, please check neutron logs for more information. 
[ 535.689480] env[61273]: ERROR nova.compute.manager [ 535.689480] env[61273]: Traceback (most recent call last): [ 535.689480] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 535.689480] env[61273]: listener.cb(fileno) [ 535.689480] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 535.689480] env[61273]: result = function(*args, **kwargs) [ 535.689480] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 535.689480] env[61273]: return func(*args, **kwargs) [ 535.689480] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 535.689480] env[61273]: raise e [ 535.689480] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 535.689480] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 535.689480] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 535.689480] env[61273]: created_port_ids = self._update_ports_for_instance( [ 535.689480] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 535.689480] env[61273]: with excutils.save_and_reraise_exception(): [ 535.689480] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 535.689480] env[61273]: self.force_reraise() [ 535.689480] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 535.689480] env[61273]: raise self.value [ 535.689480] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 535.689480] env[61273]: updated_port = self._update_port( [ 535.689480] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 535.689480] env[61273]: _ensure_no_port_binding_failure(port) [ 535.689480] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 535.689480] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 535.690253] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 2a66073a-39ef-4960-8443-ca3964c62be0, please check neutron logs for more information. [ 535.690253] env[61273]: Removing descriptor: 15 [ 535.690253] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e308046-9285-4e04-b03e-6e57ca36a4a5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.696531] env[61273]: ERROR nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2a66073a-39ef-4960-8443-ca3964c62be0, please check neutron logs for more information. 
[ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Traceback (most recent call last): [ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] yield resources [ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self.driver.spawn(context, instance, image_meta, [ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] vm_ref = self.build_virtual_machine(instance, [ 535.696531] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] vif_infos = vmwarevif.get_vif_info(self._session, [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] for vif in network_info: [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] return self._sync_wrapper(fn, *args, **kwargs) [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self.wait() [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self[:] = self._gt.wait() [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] return self._exit_event.wait() [ 535.697056] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 535.697056] env[61273]: ERROR 
nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] result = hub.switch() [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] return self.greenlet.switch() [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] result = function(*args, **kwargs) [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] return func(*args, **kwargs) [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] raise e [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] nwinfo = self.network_api.allocate_for_instance( [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] created_port_ids = self._update_ports_for_instance( [ 535.697400] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] with excutils.save_and_reraise_exception(): [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self.force_reraise() [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] raise self.value [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] updated_port = self._update_port( [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 535.697738] 
env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] _ensure_no_port_binding_failure(port) [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] raise exception.PortBindingFailed(port_id=port['id']) [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] nova.exception.PortBindingFailed: Binding failed for port 2a66073a-39ef-4960-8443-ca3964c62be0, please check neutron logs for more information. [ 535.697738] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] [ 535.698115] env[61273]: INFO nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Terminating instance [ 535.699020] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Acquiring lock "refresh_cache-ea496eae-68f9-43f1-b4cf-6743043c753b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.699569] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Acquired lock "refresh_cache-ea496eae-68f9-43f1-b4cf-6743043c753b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.699823] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 535.700474] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 59f2de9abb524ef5af8441c1ce39066c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 535.705567] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be94930c-a0a9-43d3-b002-ed5235ba054a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.712702] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59f2de9abb524ef5af8441c1ce39066c [ 535.991190] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.991737] env[61273]: 
DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 535.993484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 20ac468a54f7427b97f9896cd6847181 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 535.994476] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.778s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.996227] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 34a0e23af048406a9da6b825807fb3d6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 536.009069] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375224, 'name': Rename_Task} progress is 99%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 536.031196] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20ac468a54f7427b97f9896cd6847181 [ 536.032577] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34a0e23af048406a9da6b825807fb3d6 [ 536.295751] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 536.501717] env[61273]: DEBUG nova.compute.utils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 536.501717] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg c995fc2b57c442b09165f05c76f6c80c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 536.505157] env[61273]: DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 536.505334] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 536.516670] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375224, 'name': Rename_Task, 'duration_secs': 1.139684} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 536.517191] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c995fc2b57c442b09165f05c76f6c80c [ 536.517607] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 536.519714] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53c47768-af99-4076-a918-e218a1b9aee9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.526991] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 536.526991] env[61273]: value = "task-375225" [ 536.526991] env[61273]: _type = "Task" [ 536.526991] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 536.544343] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375225, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 536.620703] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.621225] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 621f6b46b1db4b9cbfb2e757ddb16d57 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 536.635519] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 621f6b46b1db4b9cbfb2e757ddb16d57 [ 536.666438] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.666689] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.683272] env[61273]: DEBUG nova.policy [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cd76e5588fa4d49bd07b2276708cbb9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e2cc0c992514f71a9718faad2e13951', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 536.750392] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3dee38-9524-4cfd-9770-fde7c7f184cb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.757542] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee93ce50-6629-4df5-adb0-c6ac6c435237 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.795419] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea24390-0893-412a-8837-3d0eecb8ab4b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.802632] env[61273]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34939cb5-36ae-420f-8c54-6f9c6d8abe7e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.818876] env[61273]: DEBUG nova.compute.provider_tree [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 536.819387] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 819adc23b3e64e088c89abe3d48ca1ff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 536.827353] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 819adc23b3e64e088c89abe3d48ca1ff [ 537.011761] env[61273]: DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 537.011761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg e622af68c4e34c4c9001f255e8ace817 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.040967] env[61273]: DEBUG oslo_vmware.api [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375225, 'name': PowerOnVM_Task, 'duration_secs': 0.461213} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.040967] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 537.040967] env[61273]: INFO nova.compute.manager [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Took 8.73 seconds to spawn the instance on the hypervisor. 
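[editor's note] Read together, the task names for instance 4d75a1ec trace a spawn pipeline: copy the cached image VMDK, extend the root disk to the flavor size, reconfigure the VM to attach the disk, rename, then power on. The sketch below only illustrates that ordering under those assumptions; the helper names are purely illustrative and are not Nova's vmops API.

```python
# Order-of-operations sketch for the spawn flow visible in the log:
# CopyVirtualDisk -> ExtendVirtualDisk -> ReconfigVM (attach disk) ->
# Rename -> PowerOnVM. Helper names are hypothetical.
from typing import Callable


def spawn(run_task: Callable[[str], None], image_vmdk: str,
          instance_vmdk: str, root_gb: int) -> None:
    """Run the spawn steps in order; each step is one backend task."""
    run_task(f"CopyVirtualDisk {image_vmdk} -> {instance_vmdk}")
    run_task(f"ExtendVirtualDisk {instance_vmdk} to {root_gb} GiB")
    run_task(f"ReconfigVM attach {instance_vmdk} (type sparse)")
    run_task("Rename VM to its instance UUID")
    run_task("PowerOnVM")


if __name__ == "__main__":
    spawn(lambda step: print(f"task: {step} ... completed"),
          "devstack-image-cache_base/4a9e718e.vmdk",
          "4d75a1ec/4d75a1ec.vmdk", root_gb=1)
```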
[ 537.040967] env[61273]: DEBUG nova.compute.manager [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 537.042478] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bdc950-6cf8-45ba-a239-8bbf8c374d44 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.052711] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 268a100cc4444c4cbac22af71d686f43 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.056981] env[61273]: DEBUG nova.compute.manager [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Received event network-vif-deleted-86deca19-3809-4ccf-a3db-0d79d610d6c5 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 537.057367] env[61273]: DEBUG nova.compute.manager [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Received event network-changed-2a66073a-39ef-4960-8443-ca3964c62be0 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 537.057742] env[61273]: DEBUG nova.compute.manager [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Refreshing instance network info cache due to event network-changed-2a66073a-39ef-4960-8443-ca3964c62be0. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 537.058072] env[61273]: DEBUG oslo_concurrency.lockutils [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] Acquiring lock "refresh_cache-ea496eae-68f9-43f1-b4cf-6743043c753b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.064556] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e622af68c4e34c4c9001f255e8ace817 [ 537.107403] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 268a100cc4444c4cbac22af71d686f43 [ 537.124764] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Releasing lock "refresh_cache-ea496eae-68f9-43f1-b4cf-6743043c753b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.125216] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 537.125437] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 537.125789] env[61273]: DEBUG oslo_concurrency.lockutils [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] Acquired lock "refresh_cache-ea496eae-68f9-43f1-b4cf-6743043c753b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.126017] env[61273]: DEBUG nova.network.neutron [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Refreshing network info cache for port 2a66073a-39ef-4960-8443-ca3964c62be0 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 537.126503] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] Expecting reply to msg d79ac35088854a6188c2ea81ff181d9c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.127434] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-600af6bb-6ef4-4158-b622-d8e5272d6bf6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.134115] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d79ac35088854a6188c2ea81ff181d9c [ 537.137708] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6af64c-545a-4085-befc-d4b8b95e2cbb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.168916] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ea496eae-68f9-43f1-b4cf-6743043c753b could not be found. [ 537.169065] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 537.169245] env[61273]: INFO nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 537.169466] env[61273]: DEBUG oslo.service.loopingcall [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 537.169695] env[61273]: DEBUG nova.compute.manager [-] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 537.169869] env[61273]: DEBUG nova.network.neutron [-] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 537.221472] env[61273]: DEBUG nova.network.neutron [-] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.223111] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5dbe811c325b40c59fd4fee0b99959d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.238724] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dbe811c325b40c59fd4fee0b99959d3 [ 537.326128] env[61273]: DEBUG nova.scheduler.client.report [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 537.326128] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg f34c8559efaf42038b3563b551796d1f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.340647] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f34c8559efaf42038b3563b551796d1f [ 537.447093] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Successfully created port: 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 537.515705] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg b153679e26c0432a8f5a4133b6f46fbb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.569188] env[61273]: INFO nova.compute.manager [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 
4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Took 19.17 seconds to build instance. [ 537.569188] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 153c4c603c5244daaabffa9f5ce37164 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.587672] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b153679e26c0432a8f5a4133b6f46fbb [ 537.594739] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 153c4c603c5244daaabffa9f5ce37164 [ 537.686670] env[61273]: DEBUG nova.network.neutron [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.724938] env[61273]: DEBUG nova.network.neutron [-] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.725419] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 70166c0056de47068f80bcc00bc580e8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.740786] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70166c0056de47068f80bcc00bc580e8 [ 537.828216] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.834s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.828882] env[61273]: ERROR nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d6dd597b-ff4a-4e48-b10a-96073004f3eb, please check neutron logs for more information. 
[ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Traceback (most recent call last): [ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self.driver.spawn(context, instance, image_meta, [ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] vm_ref = self.build_virtual_machine(instance, [ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] vif_infos = vmwarevif.get_vif_info(self._session, [ 537.828882] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] for vif in network_info: [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] return self._sync_wrapper(fn, *args, **kwargs) [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self.wait() [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self[:] = self._gt.wait() [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] return self._exit_event.wait() [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] result = hub.switch() [ 537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
537.829254] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] return self.greenlet.switch() [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] result = function(*args, **kwargs) [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] return func(*args, **kwargs) [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] raise e [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] nwinfo = self.network_api.allocate_for_instance( [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] created_port_ids = self._update_ports_for_instance( [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] with excutils.save_and_reraise_exception(): [ 537.829652] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] self.force_reraise() [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] raise self.value [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] updated_port = self._update_port( [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] _ensure_no_port_binding_failure(port) [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] raise exception.PortBindingFailed(port_id=port['id']) [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] nova.exception.PortBindingFailed: Binding failed for port d6dd597b-ff4a-4e48-b10a-96073004f3eb, please check neutron logs for more information. [ 537.830109] env[61273]: ERROR nova.compute.manager [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] [ 537.830403] env[61273]: DEBUG nova.compute.utils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Binding failed for port d6dd597b-ff4a-4e48-b10a-96073004f3eb, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 537.830700] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.197s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.832163] env[61273]: INFO nova.compute.claims [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 537.833734] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 72bf98beb0644216b02450631de85ba6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.836661] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Build of instance 52eb3ce9-696c-445c-b82d-90663a9e8b21 was re-scheduled: Binding failed for port d6dd597b-ff4a-4e48-b10a-96073004f3eb, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 537.837114] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 537.837333] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-52eb3ce9-696c-445c-b82d-90663a9e8b21" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.837471] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-52eb3ce9-696c-445c-b82d-90663a9e8b21" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.837620] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 537.838004] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 5546dd98916b4db18ca0aa06e12b4f73 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.849027] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5546dd98916b4db18ca0aa06e12b4f73 [ 537.851592] env[61273]: DEBUG nova.network.neutron [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.852096] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] Expecting reply to msg da73ff88489f4d8abb03cf7a097b45f7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 537.875168] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da73ff88489f4d8abb03cf7a097b45f7 [ 537.888463] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72bf98beb0644216b02450631de85ba6 [ 538.019015] env[61273]: DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 538.047049] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=<?>,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-08-02T13:31:32Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 538.047244] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 538.047401] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 538.047575] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 538.047714] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 538.047855] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 538.048103] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 538.048228] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 538.048388] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 
tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 538.048540] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 538.048792] env[61273]: DEBUG nova.virt.hardware [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 538.049600] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ec0fb0-b1d8-47dc-a076-74bbb7f830ef {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.057882] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc41760-b726-4e72-a5f2-d8c20d0a9fb3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.072749] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a67fda18-1c60-4ab4-89ee-00087cc2ef3a tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 20.708s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.073521] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 47ff04f08288498c9516b63b3458744c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 538.074424] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 4.491s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.074521] env[61273]: INFO nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 538.074687] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.088171] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47ff04f08288498c9516b63b3458744c [ 538.162774] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Acquiring lock "3f1f549f-8034-4685-b6f0-db5a7a2a4a32" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.162894] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Lock "3f1f549f-8034-4685-b6f0-db5a7a2a4a32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.231051] env[61273]: INFO nova.compute.manager [-] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Took 1.06 seconds to deallocate network for instance. [ 538.233550] env[61273]: DEBUG nova.compute.claims [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 538.233732] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.337843] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg c6e2a46bc76249d08b9d2d05ba7cd3c1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 538.345485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6e2a46bc76249d08b9d2d05ba7cd3c1 [ 538.356391] env[61273]: DEBUG oslo_concurrency.lockutils [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] Releasing lock "refresh_cache-ea496eae-68f9-43f1-b4cf-6743043c753b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.357093] env[61273]: DEBUG nova.compute.manager [req-7862a8de-4965-4345-8c71-d08604fbf616 req-9f3a42f6-5d8b-494c-9178-0140c10b982d service nova] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Received event network-vif-deleted-2a66073a-39ef-4960-8443-ca3964c62be0 {{(pid=61273) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11129}} [ 538.363649] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.585490] env[61273]: DEBUG nova.compute.manager [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.585490] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg a0cc3613191e40afb616ab9727474891 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 538.586993] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.587089] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg bfb9cabd3d944d8681cc69219e62828d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 538.614400] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfb9cabd3d944d8681cc69219e62828d [ 538.716623] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0cc3613191e40afb616ab9727474891 [ 539.065241] env[61273]: ERROR nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port caf04a4d-9a60-4997-9d4d-89dfc0470281, please check neutron logs for more information. 
[ 539.065241] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 539.065241] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.065241] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 539.065241] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 539.065241] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 539.065241] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 539.065241] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 539.065241] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.065241] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 539.065241] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.065241] env[61273]: ERROR nova.compute.manager raise self.value [ 539.065241] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 539.065241] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 539.065241] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.065241] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 539.065800] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.065800] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 539.065800] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port caf04a4d-9a60-4997-9d4d-89dfc0470281, please check neutron logs for more information. 
[ 539.065800] env[61273]: ERROR nova.compute.manager [ 539.065800] env[61273]: Traceback (most recent call last): [ 539.065800] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 539.065800] env[61273]: listener.cb(fileno) [ 539.065800] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.065800] env[61273]: result = function(*args, **kwargs) [ 539.065800] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 539.065800] env[61273]: return func(*args, **kwargs) [ 539.065800] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 539.065800] env[61273]: raise e [ 539.065800] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.065800] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 539.065800] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 539.065800] env[61273]: created_port_ids = self._update_ports_for_instance( [ 539.065800] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 539.065800] env[61273]: with excutils.save_and_reraise_exception(): [ 539.065800] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.065800] env[61273]: self.force_reraise() [ 539.065800] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.065800] env[61273]: raise self.value [ 539.065800] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 539.065800] env[61273]: updated_port = self._update_port( [ 539.065800] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.065800] env[61273]: _ensure_no_port_binding_failure(port) [ 539.065800] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.065800] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 539.066644] env[61273]: nova.exception.PortBindingFailed: Binding failed for port caf04a4d-9a60-4997-9d4d-89dfc0470281, please check neutron logs for more information. [ 539.066644] env[61273]: Removing descriptor: 16 [ 539.066644] env[61273]: ERROR nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port caf04a4d-9a60-4997-9d4d-89dfc0470281, please check neutron logs for more information. 
[ 539.066644] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Traceback (most recent call last): [ 539.066644] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 539.066644] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] yield resources [ 539.066644] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 539.066644] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self.driver.spawn(context, instance, image_meta, [ 539.066644] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 539.066644] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 539.066644] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 539.066644] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] vm_ref = self.build_virtual_machine(instance, [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] vif_infos = vmwarevif.get_vif_info(self._session, [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] for vif in network_info: [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] return self._sync_wrapper(fn, *args, **kwargs) [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self.wait() [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self[:] = self._gt.wait() [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] return self._exit_event.wait() [ 539.066984] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 539.067379] env[61273]: ERROR 
nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] result = hub.switch() [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] return self.greenlet.switch() [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] result = function(*args, **kwargs) [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] return func(*args, **kwargs) [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] raise e [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] nwinfo = self.network_api.allocate_for_instance( [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 539.067379] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] created_port_ids = self._update_ports_for_instance( [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] with excutils.save_and_reraise_exception(): [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self.force_reraise() [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] raise self.value [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] updated_port = self._update_port( [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.067775] 
env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] _ensure_no_port_binding_failure(port) [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.067775] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] raise exception.PortBindingFailed(port_id=port['id']) [ 539.068147] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] nova.exception.PortBindingFailed: Binding failed for port caf04a4d-9a60-4997-9d4d-89dfc0470281, please check neutron logs for more information. [ 539.068147] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] [ 539.068147] env[61273]: INFO nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Terminating instance [ 539.069140] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "refresh_cache-f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.069321] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquired lock "refresh_cache-f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.069513] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 539.073307] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 27b309b0b56c4d9d8a08ceb724b09705 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 539.090812] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-52eb3ce9-696c-445c-b82d-90663a9e8b21" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.091037] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 539.091198] env[61273]: DEBUG nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 539.091354] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 539.096465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27b309b0b56c4d9d8a08ceb724b09705 [ 539.108064] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Acquiring lock "36c3ac75-5bfd-4a89-9ddb-28fded8da39c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.108293] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Lock "36c3ac75-5bfd-4a89-9ddb-28fded8da39c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.114915] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.157998] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa0ad2e-e0c6-4470-a519-60a14879d62b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.166259] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0931a7-9d39-42c2-acb3-4f86eecf4dc7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.171982] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.172617] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg d100239d8a4944d696ff4146e962a966 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 539.211560] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d100239d8a4944d696ff4146e962a966 [ 539.212397] env[61273]: DEBUG nova.network.neutron [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.213035] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 77b5d9d938734b6fae3c86dcb723d541 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 539.214099] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4c9c21-5522-41fd-954f-add67d2e53ce {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.223245] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f402bca0-ad07-4a14-a055-0473063379cf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.229568] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77b5d9d938734b6fae3c86dcb723d541 [ 539.243469] env[61273]: DEBUG nova.compute.provider_tree [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 539.244065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg d812801126884bcab2196a69884eb27b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 539.257607] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d812801126884bcab2196a69884eb27b [ 539.380196] env[61273]: ERROR nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737, please check neutron logs for more information. 
[ 539.380196] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 539.380196] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.380196] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 539.380196] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 539.380196] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 539.380196] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 539.380196] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 539.380196] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.380196] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 539.380196] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.380196] env[61273]: ERROR nova.compute.manager raise self.value [ 539.380196] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 539.380196] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 539.380196] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.380196] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 539.380617] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.380617] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 539.380617] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737, please check neutron logs for more information. 
[ 539.380617] env[61273]: ERROR nova.compute.manager [ 539.380617] env[61273]: Traceback (most recent call last): [ 539.380617] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 539.380617] env[61273]: listener.cb(fileno) [ 539.380617] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.380617] env[61273]: result = function(*args, **kwargs) [ 539.380617] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 539.380617] env[61273]: return func(*args, **kwargs) [ 539.380617] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 539.380617] env[61273]: raise e [ 539.380617] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.380617] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 539.380617] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 539.380617] env[61273]: created_port_ids = self._update_ports_for_instance( [ 539.380617] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 539.380617] env[61273]: with excutils.save_and_reraise_exception(): [ 539.380617] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.380617] env[61273]: self.force_reraise() [ 539.380617] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.380617] env[61273]: raise self.value [ 539.380617] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 539.380617] env[61273]: updated_port = self._update_port( [ 539.380617] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.380617] env[61273]: _ensure_no_port_binding_failure(port) [ 539.380617] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.380617] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 539.381460] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737, please check neutron logs for more information. [ 539.381460] env[61273]: Removing descriptor: 19 [ 539.381460] env[61273]: ERROR nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737, please check neutron logs for more information. 
[ 539.381460] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Traceback (most recent call last): [ 539.381460] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 539.381460] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] yield resources [ 539.381460] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 539.381460] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self.driver.spawn(context, instance, image_meta, [ 539.381460] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 539.381460] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 539.381460] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 539.381460] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] vm_ref = self.build_virtual_machine(instance, [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] vif_infos = vmwarevif.get_vif_info(self._session, [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] for vif in network_info: [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] return self._sync_wrapper(fn, *args, **kwargs) [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self.wait() [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self[:] = self._gt.wait() [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] return self._exit_event.wait() [ 539.381823] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 539.382202] env[61273]: ERROR 
nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] result = hub.switch() [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] return self.greenlet.switch() [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] result = function(*args, **kwargs) [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] return func(*args, **kwargs) [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] raise e [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] nwinfo = self.network_api.allocate_for_instance( [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 539.382202] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] created_port_ids = self._update_ports_for_instance( [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] with excutils.save_and_reraise_exception(): [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self.force_reraise() [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] raise self.value [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] updated_port = self._update_port( [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.382569] 
env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] _ensure_no_port_binding_failure(port) [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 539.382569] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] raise exception.PortBindingFailed(port_id=port['id']) [ 539.382902] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] nova.exception.PortBindingFailed: Binding failed for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737, please check neutron logs for more information. [ 539.382902] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] [ 539.382902] env[61273]: INFO nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Terminating instance [ 539.382902] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "refresh_cache-40709d2b-cc33-4ac0-9a13-731442d7edff" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.382902] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquired lock "refresh_cache-40709d2b-cc33-4ac0-9a13-731442d7edff" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.382902] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 539.383083] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 07491fc1980e434b8515b80f0fbd6f8d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 539.391252] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07491fc1980e434b8515b80f0fbd6f8d [ 539.618192] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.718249] env[61273]: INFO nova.compute.manager [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] Took 0.63 seconds to deallocate network for instance. 
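The tracebacks above all terminate in _ensure_no_port_binding_failure in nova/network/neutron.py. A minimal sketch of that check, assuming Neutron reports a failed binding through the port's binding:vif_type field (the class and constant below are simplified stand-ins for illustration, not Nova's actual definitions):

    # Sketch of the check behind the PortBindingFailed errors above, under the
    # assumption that Neutron marks a failed binding with
    # binding:vif_type == 'binding_failed'. Simplified stand-ins, not Nova code.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)


    def ensure_no_port_binding_failure(port):
        # Port dicts come back from the Neutron API; a port the host could
        # not bind is left in the 'binding_failed' vif type, and the spawn is
        # aborted here rather than building a VM with no usable NIC.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

A port that fails to bind this way is why the instances above go straight from network allocation to "Terminating instance" and, later, to aborting their resource claims.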
[ 539.720156] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg e609efd62638432a949b0b79d7660caa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 539.746645] env[61273]: DEBUG nova.scheduler.client.report [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 539.751165] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 3d9bf005691647c3b8e1438538152ead in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 539.778227] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d9bf005691647c3b8e1438538152ead [ 539.782265] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e609efd62638432a949b0b79d7660caa [ 539.881639] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.881944] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg bdedd95fd2d24d40b31bff0097b373cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 539.893451] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdedd95fd2d24d40b31bff0097b373cc [ 539.908154] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 540.050416] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.050960] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 50e2392d8fdf4338ad4eeda99761040b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.061821] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50e2392d8fdf4338ad4eeda99761040b [ 540.088336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 552aae721cbb49e8be70d6396b5f4115 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.107880] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 552aae721cbb49e8be70d6396b5f4115 [ 540.225037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 1a878ef9e951457d950874c0fc92567d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.253675] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.254252] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 540.255909] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg c2f094b48c624afa9124e4f26cddb7e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.256950] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.938s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.260171] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg acb79985a11b41b98b4e3b1f74511631 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.283103] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a878ef9e951457d950874c0fc92567d [ 540.303408] env[61273]: DEBUG nova.compute.manager [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Received event network-changed-caf04a4d-9a60-4997-9d4d-89dfc0470281 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 540.303679] env[61273]: DEBUG nova.compute.manager [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Refreshing instance network info cache due to event network-changed-caf04a4d-9a60-4997-9d4d-89dfc0470281. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 540.303879] env[61273]: DEBUG oslo_concurrency.lockutils [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] Acquiring lock "refresh_cache-f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.332389] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acb79985a11b41b98b4e3b1f74511631 [ 540.333615] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2f094b48c624afa9124e4f26cddb7e6 [ 540.384772] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Releasing lock "refresh_cache-f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.385250] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 540.385442] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 540.386761] env[61273]: DEBUG oslo_concurrency.lockutils [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] Acquired lock "refresh_cache-f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.386886] env[61273]: DEBUG nova.network.neutron [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Refreshing network info cache for port caf04a4d-9a60-4997-9d4d-89dfc0470281 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 540.387304] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] Expecting reply to msg c3d18d3213fd4af59acdc73e601431fa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.388018] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae76b129-d128-44b7-b509-1ac1d3c499d8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.391275] env[61273]: DEBUG nova.compute.manager [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Received event network-changed-916f0a29-ac67-4ea8-b7f9-7d34e8b0b737 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 540.391514] env[61273]: DEBUG nova.compute.manager [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Refreshing instance network info cache due to event network-changed-916f0a29-ac67-4ea8-b7f9-7d34e8b0b737. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 540.391710] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] Acquiring lock "refresh_cache-40709d2b-cc33-4ac0-9a13-731442d7edff" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.396622] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3d18d3213fd4af59acdc73e601431fa [ 540.399584] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a132435d-e2ce-4ef9-9625-f588f5e4b1be {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.423640] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5 could not be found. 
[ 540.423925] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 540.424134] env[61273]: INFO nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 540.424373] env[61273]: DEBUG oslo.service.loopingcall [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 540.424624] env[61273]: DEBUG nova.compute.manager [-] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 540.424724] env[61273]: DEBUG nova.network.neutron [-] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 540.460618] env[61273]: DEBUG nova.network.neutron [-] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 540.461295] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3587e3b2f34a41c59b50ed87d407ce06 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.470045] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3587e3b2f34a41c59b50ed87d407ce06 [ 540.553631] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Releasing lock "refresh_cache-40709d2b-cc33-4ac0-9a13-731442d7edff" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.554154] env[61273]: DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 540.554326] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 540.554638] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] Acquired lock "refresh_cache-40709d2b-cc33-4ac0-9a13-731442d7edff" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.554804] env[61273]: DEBUG nova.network.neutron [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Refreshing network info cache for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 540.555229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] Expecting reply to msg 9803626a17344386b0da2eb6fe292fd8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.556042] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7462b60d-ead2-4f81-a43c-7e423e1f2755 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.565377] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd093dae-1eb1-4fcc-96c7-4a034f12021f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.577567] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9803626a17344386b0da2eb6fe292fd8 [ 540.594397] env[61273]: INFO nova.compute.manager [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Rebuilding instance [ 540.597276] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 40709d2b-cc33-4ac0-9a13-731442d7edff could not be found. [ 540.597276] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 540.597276] env[61273]: INFO nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Took 0.04 seconds to destroy the instance on the hypervisor. 
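Both instances above log "Instance does not exist on backend" yet are reported destroyed in roughly 0.04 seconds: when the spawn failed before any VM was built, the destroy path treats the missing VM as already gone. A rough sketch of that tolerant-destroy pattern, using hypothetical helper names (find_vm, unregister_vm, delete_datastore_files are illustrative, not the driver's real API):

    import logging

    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        # Stand-in for the driver's "no such VM on the backend" error.
        pass


    def destroy_instance(backend, instance_uuid):
        # If the VM was never created (spawn aborted during network setup),
        # warn and treat the destroy as complete instead of failing the
        # delete request; otherwise unregister the VM and remove its files.
        try:
            vm_ref = backend.find_vm(instance_uuid)
        except InstanceNotFound:
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
            return
        backend.unregister_vm(vm_ref)
        backend.delete_datastore_files(vm_ref)

Once the destroy returns, the manager proceeds to release the instance's network resources, which is the "Deallocating network for instance" sequence that follows.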
[ 540.598044] env[61273]: DEBUG oslo.service.loopingcall [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 540.598161] env[61273]: DEBUG nova.compute.manager [-] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 540.598340] env[61273]: DEBUG nova.network.neutron [-] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 540.633646] env[61273]: DEBUG nova.network.neutron [-] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 540.633992] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dee28a2935bb49a698a8e53a477d1f0d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.647271] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dee28a2935bb49a698a8e53a477d1f0d [ 540.654171] env[61273]: DEBUG nova.compute.manager [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 540.655343] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf51943-c649-402b-9403-aa255f2ddb32 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.664543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg edb4ac4a190f460c893c14e8c51d3f4d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.756083] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edb4ac4a190f460c893c14e8c51d3f4d [ 540.761149] env[61273]: DEBUG nova.compute.utils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 540.761828] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg fe7fc8f835ad4f3288fb3fc838b8bc2d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.767444] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 540.767611] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 540.783588] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe7fc8f835ad4f3288fb3fc838b8bc2d [ 540.892120] env[61273]: INFO nova.scheduler.client.report [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Deleted allocations for instance 52eb3ce9-696c-445c-b82d-90663a9e8b21 [ 540.897116] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg a8099f86bdb94c7ebff08747827f64c4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.910971] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8099f86bdb94c7ebff08747827f64c4 [ 540.936286] env[61273]: DEBUG nova.network.neutron [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 540.939034] env[61273]: DEBUG nova.policy [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '206eaa30e3aa4fa982e48dd99c20f44a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a75c216e55475b8b8e05870de35b29', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 540.964248] env[61273]: DEBUG nova.network.neutron [-] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.964803] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 63024967eed94a998d5248153eec3ba4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 540.979984] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63024967eed94a998d5248153eec3ba4 [ 541.003445] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "5cddeea1-7558-4c12-afdc-2ea7a706881a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.003850] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 
tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "5cddeea1-7558-4c12-afdc-2ea7a706881a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.099108] env[61273]: DEBUG nova.network.neutron [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.137209] env[61273]: DEBUG nova.network.neutron [-] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.137649] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c7f1cba55f1c45c79ea59656e40b3e87 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 541.149929] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7f1cba55f1c45c79ea59656e40b3e87 [ 541.151273] env[61273]: DEBUG nova.network.neutron [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.151783] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] Expecting reply to msg d4ece36f67a34609bcc1e981904a9029 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 541.164185] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4ece36f67a34609bcc1e981904a9029 [ 541.168387] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 541.171075] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e590153-166b-42d2-8e91-39f4e9e59062 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.175342] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7896ff-4871-420c-b446-96b07f0ac6d5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.185356] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a370c7-b127-47fe-a345-4c4635b61d38 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.188892] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 541.188892] env[61273]: value = "task-375226" [ 541.188892] env[61273]: _type = "Task" [ 541.188892] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.221349] env[61273]: DEBUG nova.network.neutron [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.221937] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] Expecting reply to msg cc4edb01dc1f4080bb9c464554a998fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 541.236772] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c47c577-bd77-4f19-9172-5ec19b927331 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.243636] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc4edb01dc1f4080bb9c464554a998fc [ 541.244789] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.250738] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74b6028-d739-4503-a931-a3e9904c2b9c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.272253] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 541.274640] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 733b7eb4e7504643a9dbd25778b65bcf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 541.277352] env[61273]: DEBUG nova.compute.provider_tree [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 541.277940] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 1ee9379535e9440b8e494e3f8bbf6053 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 541.292228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ee9379535e9440b8e494e3f8bbf6053 [ 541.326439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 733b7eb4e7504643a9dbd25778b65bcf [ 541.400235] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9aaa151-a991-4861-aeec-17e30c5d48e5 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "52eb3ce9-696c-445c-b82d-90663a9e8b21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.259s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.400829] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 4e8f43708f134bc384db444abc689095 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 541.401729] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "52eb3ce9-696c-445c-b82d-90663a9e8b21" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.819s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.405166] env[61273]: INFO nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 52eb3ce9-696c-445c-b82d-90663a9e8b21] During sync_power_state the instance has a pending task (spawning). Skip. [ 541.405166] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "52eb3ce9-696c-445c-b82d-90663a9e8b21" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.418448] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e8f43708f134bc384db444abc689095 [ 541.469620] env[61273]: INFO nova.compute.manager [-] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Took 1.04 seconds to deallocate network for instance. 
[ 541.469620] env[61273]: DEBUG nova.compute.claims [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 541.469620] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.640286] env[61273]: INFO nova.compute.manager [-] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Took 1.04 seconds to deallocate network for instance. [ 541.642679] env[61273]: DEBUG nova.compute.claims [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 541.642870] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.653573] env[61273]: DEBUG oslo_concurrency.lockutils [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] Releasing lock "refresh_cache-f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.653873] env[61273]: DEBUG nova.compute.manager [req-417b81b8-0afc-44f7-bc0a-abbe4b974403 req-8f74cdf0-fd29-4b0a-ad99-04440fb5fcd8 service nova] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Received event network-vif-deleted-caf04a4d-9a60-4997-9d4d-89dfc0470281 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 541.703652] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375226, 'name': PowerOffVM_Task, 'duration_secs': 0.127789} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.703796] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 541.704129] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 541.704959] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fd3a4d-fc34-44c1-a027-8e965e39e45d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.711630] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 541.711872] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6cca5eee-d2ab-4bec-b577-9acfa6e2b7eb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.740345] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 541.740345] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Deleting contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 541.740345] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleting the datastore file [datastore1] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2 {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 541.740345] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68c87dd3-9721-4ef0-bb1b-999bad9efdee {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.740780] env[61273]: DEBUG oslo_concurrency.lockutils [req-6a040399-13ac-4508-97df-eca6351caa7b req-ea03a8ee-be11-4474-8b11-12ff6edc6407 service nova] Releasing lock "refresh_cache-40709d2b-cc33-4ac0-9a13-731442d7edff" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.742932] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 
tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 541.742932] env[61273]: value = "task-375228" [ 541.742932] env[61273]: _type = "Task" [ 541.742932] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.753013] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.782744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 907c17aacf3f4e1caa349a72373bf6ef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 541.784293] env[61273]: DEBUG nova.scheduler.client.report [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 541.786625] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 2546668932554c3aad4f844c08f273d5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 541.808130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2546668932554c3aad4f844c08f273d5 [ 541.836845] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 907c17aacf3f4e1caa349a72373bf6ef [ 541.903754] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 541.905486] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg cd330584206a4dd2a196d5e8eeb59ac4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 541.931798] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Successfully created port: 14197306-66ad-4077-914c-b79fd0a658db {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 541.953293] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd330584206a4dd2a196d5e8eeb59ac4 [ 542.252422] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101343} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.252679] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 542.252863] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Deleted contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 542.253027] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 542.254795] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 07dcfbd8cf1b49728875d5163cae8001 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 542.289343] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 542.292043] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.035s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.292606] env[61273]: ERROR nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 397126f4-dc7b-428e-9725-ba5c0ded1af3, please check neutron logs for more information. [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] Traceback (most recent call last): [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self.driver.spawn(context, instance, image_meta, [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self._vmops.spawn(context, instance, image_meta, injected_files, [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] vm_ref = self.build_virtual_machine(instance, [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] vif_infos = vmwarevif.get_vif_info(self._session, [ 542.292606] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] for vif in network_info: [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] return self._sync_wrapper(fn, *args, **kwargs) [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self.wait() [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 542.292980] env[61273]: ERROR 
nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self[:] = self._gt.wait() [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] return self._exit_event.wait() [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] result = hub.switch() [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 542.292980] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] return self.greenlet.switch() [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] result = function(*args, **kwargs) [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] return func(*args, **kwargs) [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] raise e [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] nwinfo = self.network_api.allocate_for_instance( [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] created_port_ids = self._update_ports_for_instance( [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] with excutils.save_and_reraise_exception(): [ 542.293336] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] self.force_reraise() [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] raise self.value [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] updated_port = self._update_port( [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] _ensure_no_port_binding_failure(port) [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] raise exception.PortBindingFailed(port_id=port['id']) [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] nova.exception.PortBindingFailed: Binding failed for port 397126f4-dc7b-428e-9725-ba5c0ded1af3, please check neutron logs for more information. [ 542.293689] env[61273]: ERROR nova.compute.manager [instance: 986f91ac-e44f-474a-885a-bc097b396019] [ 542.294012] env[61273]: DEBUG nova.compute.utils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Binding failed for port 397126f4-dc7b-428e-9725-ba5c0ded1af3, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 542.294554] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07dcfbd8cf1b49728875d5163cae8001 [ 542.295116] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.651s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.296653] env[61273]: INFO nova.compute.claims [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 542.299460] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 28a2f3c2771d4e719f4c5671acf58929 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 542.299460] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Build of instance 986f91ac-e44f-474a-885a-bc097b396019 was re-scheduled: Binding failed for port 397126f4-dc7b-428e-9725-ba5c0ded1af3, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 542.300354] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 542.300354] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "refresh_cache-986f91ac-e44f-474a-885a-bc097b396019" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.300354] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquired lock "refresh_cache-986f91ac-e44f-474a-885a-bc097b396019" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.300354] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 542.309322] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg c3824279899d44a988a59fd1880b02c2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 542.318063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3824279899d44a988a59fd1880b02c2 [ 542.320629] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 542.320856] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 542.321008] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b 
tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 542.321182] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 542.321323] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 542.321478] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 542.321666] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 542.321816] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 542.321977] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 542.322248] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 542.322309] env[61273]: DEBUG nova.virt.hardware [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 542.323335] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57198f19-c792-4438-8d63-2bd69d0ec1db {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.336939] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a4943a-b164-4adc-b593-32da76011783 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.341153] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg 28a2f3c2771d4e719f4c5671acf58929 [ 542.435473] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.760204] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg d03f3c7b81c94897b566b8791ff5584d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 542.798525] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d03f3c7b81c94897b566b8791ff5584d [ 542.812810] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg ec9333f94073485dad74cc9a50c7df44 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 542.821786] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec9333f94073485dad74cc9a50c7df44 [ 542.839912] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.898862] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.899377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 6ac80658315d432f85dc4f4085053b9a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 542.908088] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ac80658315d432f85dc4f4085053b9a [ 543.169862] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "7f8b08d4-3535-48ab-ba3f-a159511e2a64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.170252] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "7f8b08d4-3535-48ab-ba3f-a159511e2a64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
[ 543.306829] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 543.307071] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 543.307222] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.307400] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 543.307581] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 543.307743] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 543.307941] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 543.308255] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 543.308448] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 
tempest-ServersAdmin275Test-2057105880-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 543.308644] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 543.308841] env[61273]: DEBUG nova.virt.hardware [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 543.309768] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc8664c-36cf-4c45-a714-7b5bd4645084 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.321177] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104db6c5-cedc-4a4e-8137-47a33cdcf990 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.336723] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 543.343177] env[61273]: DEBUG oslo.service.loopingcall [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 543.346258] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 543.346751] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24afc69b-0aab-4151-8834-ae753e103b8c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.369672] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 543.369672] env[61273]: value = "task-375229" [ 543.369672] env[61273]: _type = "Task" [ 543.369672] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.382346] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375229, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.402255] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Releasing lock "refresh_cache-986f91ac-e44f-474a-885a-bc097b396019" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.402508] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 543.402696] env[61273]: DEBUG nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 543.402879] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 543.430800] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 543.431425] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 0cb0fc1eac834be1aac7e3123391a76f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 543.440830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cb0fc1eac834be1aac7e3123391a76f [ 543.605259] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfe5187-c9e4-41bc-a4b1-c6db89f07b61 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.614890] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6733896-03bc-4826-a54a-a0d04db71db4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.650891] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa66a7d-8be7-46e2-9b2b-4a59075a6336 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.658820] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54075898-3c59-43f6-a53e-448fd486403d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.675134] env[61273]: DEBUG nova.compute.provider_tree [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.675919] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg e4d956020d1f4306a1c42671f16c99f6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 543.689836] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4d956020d1f4306a1c42671f16c99f6 [ 543.879343] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375229, 'name': CreateVM_Task, 'duration_secs': 0.292291} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.879615] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 543.880410] env[61273]: DEBUG oslo_vmware.service [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72febf5-8d8d-4781-9e48-d832c5ad6404 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.886175] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.886331] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.886710] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 543.886954] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e485256-81b6-4e0f-90cf-dea09224d0a0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.891701] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 543.891701] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52a2093b-8198-5464-c529-ea179c5346dd" [ 543.891701] env[61273]: _type = "Task" [ 543.891701] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.900178] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52a2093b-8198-5464-c529-ea179c5346dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.935759] env[61273]: DEBUG nova.network.neutron [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.935759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 69235e9be082406b9e3b8e36f95a8600 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 543.942982] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69235e9be082406b9e3b8e36f95a8600 [ 544.156797] env[61273]: DEBUG nova.compute.manager [req-c78c954e-171a-49cd-ad49-2adacc841962 req-883ed6ce-f886-46b1-a5ee-2df1ee6eb5bd service nova] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Received event network-vif-deleted-916f0a29-ac67-4ea8-b7f9-7d34e8b0b737 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 544.182531] env[61273]: DEBUG nova.scheduler.client.report [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.185097] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 1f937bf8adde4e92bcde6e99ff635b3f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 544.198201] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f937bf8adde4e92bcde6e99ff635b3f [ 544.408399] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.408674] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 544.408903] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.409626] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.409626] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 544.409626] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a304ab04-2257-46ab-ba34-fa922fa2cc12 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.431706] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 544.431917] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 544.433540] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37c9824-1dd7-4de8-9baf-ae602cb1345c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.436995] env[61273]: INFO nova.compute.manager [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: 986f91ac-e44f-474a-885a-bc097b396019] Took 1.03 seconds to deallocate network for instance. [ 544.438668] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg cca020aa83f44db7ac2c75b8f56b04d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 544.451401] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07e7f7cf-7371-4605-983c-ee73ec57c274 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.454431] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 544.454431] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]526d695f-898a-0678-862d-81c494524f48" [ 544.454431] env[61273]: _type = "Task" [ 544.454431] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.465416] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]526d695f-898a-0678-862d-81c494524f48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.498962] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cca020aa83f44db7ac2c75b8f56b04d1 [ 544.534087] env[61273]: ERROR nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 14197306-66ad-4077-914c-b79fd0a658db, please check neutron logs for more information. [ 544.534087] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 544.534087] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.534087] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 544.534087] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.534087] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 544.534087] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.534087] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 544.534087] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.534087] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 544.534087] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.534087] env[61273]: ERROR nova.compute.manager raise self.value [ 544.534087] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.534087] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 544.534087] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.534087] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 544.534658] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.534658] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 544.534658] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 14197306-66ad-4077-914c-b79fd0a658db, please check neutron logs for more information. 
[ 544.534658] env[61273]: ERROR nova.compute.manager [ 544.535525] env[61273]: Traceback (most recent call last): [ 544.535669] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 544.535669] env[61273]: listener.cb(fileno) [ 544.535791] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.535791] env[61273]: result = function(*args, **kwargs) [ 544.535862] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 544.535862] env[61273]: return func(*args, **kwargs) [ 544.535950] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 544.535950] env[61273]: raise e [ 544.536053] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.536053] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 544.536117] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.536117] env[61273]: created_port_ids = self._update_ports_for_instance( [ 544.536185] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.536185] env[61273]: with excutils.save_and_reraise_exception(): [ 544.536424] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.536424] env[61273]: self.force_reraise() [ 544.536596] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.536596] env[61273]: raise self.value [ 544.536693] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.536693] env[61273]: updated_port = self._update_port( [ 544.536757] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.536757] env[61273]: _ensure_no_port_binding_failure(port) [ 544.536861] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.536861] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 544.536947] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 14197306-66ad-4077-914c-b79fd0a658db, please check neutron logs for more information. [ 544.536994] env[61273]: Removing descriptor: 19 [ 544.538171] env[61273]: ERROR nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 14197306-66ad-4077-914c-b79fd0a658db, please check neutron logs for more information. 
[ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Traceback (most recent call last): [ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] yield resources [ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self.driver.spawn(context, instance, image_meta, [ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] vm_ref = self.build_virtual_machine(instance, [ 544.538171] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] vif_infos = vmwarevif.get_vif_info(self._session, [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] for vif in network_info: [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] return self._sync_wrapper(fn, *args, **kwargs) [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self.wait() [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self[:] = self._gt.wait() [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] return self._exit_event.wait() [ 544.538552] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 544.538552] env[61273]: ERROR 
nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] result = hub.switch() [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] return self.greenlet.switch() [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] result = function(*args, **kwargs) [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] return func(*args, **kwargs) [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] raise e [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] nwinfo = self.network_api.allocate_for_instance( [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] created_port_ids = self._update_ports_for_instance( [ 544.538946] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] with excutils.save_and_reraise_exception(): [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self.force_reraise() [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] raise self.value [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] updated_port = self._update_port( [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.539341] 
env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] _ensure_no_port_binding_failure(port) [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] raise exception.PortBindingFailed(port_id=port['id']) [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] nova.exception.PortBindingFailed: Binding failed for port 14197306-66ad-4077-914c-b79fd0a658db, please check neutron logs for more information. [ 544.539341] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] [ 544.540335] env[61273]: INFO nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Terminating instance [ 544.543041] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Acquiring lock "refresh_cache-1784917b-8a7e-4974-b8b3-f8f2b3db019a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.543440] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Acquired lock "refresh_cache-1784917b-8a7e-4974-b8b3-f8f2b3db019a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.543880] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 544.544526] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 811c3bfa2a2748eaa3f48486bb2307d8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 544.552255] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 811c3bfa2a2748eaa3f48486bb2307d8 [ 544.692039] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.692039] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 544.692039] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 86f49deaaa37493d9bdbec5cf50a3151 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 544.692039] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.604s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.693437] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 7169071e8964422c888508089d07fd4a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 544.746563] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86f49deaaa37493d9bdbec5cf50a3151 [ 544.746563] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7169071e8964422c888508089d07fd4a [ 544.943072] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg b65cc077939d4ef5816b2923a3bce991 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 544.972329] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Preparing fetch location {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 544.972628] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Creating directory with path [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 544.972887] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-888c5b13-6d7e-4bd3-acc0-2a9462dbef15 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.996189] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Created directory with path [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 544.997349] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Fetch image to [datastore2] 
vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 544.997575] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Downloading image file data 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b to [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk on the data store datastore2 {{(pid=61273) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 544.998361] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5ea64b-7221-47c1-b8c5-b2cff0183475 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.002204] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b65cc077939d4ef5816b2923a3bce991 [ 545.007751] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7527e940-d66c-420f-b24e-edd09bcdc833 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.029707] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfac154-26d6-490b-9c3a-55e4221ae258 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.083850] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455b47ba-3f96-4380-9f36-16489be47efb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.091736] env[61273]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0609babe-7a98-43f4-8ff2-0fcd5f8e0224 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.116572] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.185305] env[61273]: DEBUG nova.virt.vmwareapi.images [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Downloading image file data 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b to the data store datastore2 {{(pid=61273) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 545.197050] env[61273]: DEBUG nova.compute.utils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.197674] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 56ce70a080a24a3b96a961ec081c1cfd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 545.198586] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 545.198751] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 545.230229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56ce70a080a24a3b96a961ec081c1cfd [ 545.256885] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Acquiring lock "7d1f7566-8e5e-476c-9d19-49ed7b16c308" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.257102] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Lock "7d1f7566-8e5e-476c-9d19-49ed7b16c308" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.259947] env[61273]: DEBUG oslo_vmware.rw_handles [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61273) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 545.326458] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.326780] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 4b54315093664c64b2ca6f3b17dc74ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 545.346984] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b54315093664c64b2ca6f3b17dc74ba [ 545.410860] env[61273]: DEBUG nova.policy [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6116fae5f64e41e09dd7d6f6fcab88dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67f289a0c54e46e18b97b47c11a9b1f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 545.476977] env[61273]: INFO nova.scheduler.client.report [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Deleted allocations for instance 986f91ac-e44f-474a-885a-bc097b396019 [ 545.486950] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 0bacdc1c51024d7cb32127d39e1e421c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 545.505145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bacdc1c51024d7cb32127d39e1e421c [ 545.612300] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f63ec0-8bdb-4e9d-b872-e8acbbc0c20b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.621359] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b78406-d700-43f1-ac50-6af674810db1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.665728] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fec2b87-02b5-42e1-b82e-3cfa1d00865f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.676540] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31ad5e8-f5a5-4606-a43a-c357e251b922 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.694074] env[61273]: DEBUG 
nova.compute.provider_tree [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.694606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg b05631be396047459104f2bcbdb10eb2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 545.704667] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 545.706327] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg cbf59c8c9f0c4f6aa4ae00aae5aa207c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 545.720735] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b05631be396047459104f2bcbdb10eb2 [ 545.753240] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbf59c8c9f0c4f6aa4ae00aae5aa207c [ 545.832092] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Releasing lock "refresh_cache-1784917b-8a7e-4974-b8b3-f8f2b3db019a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.832724] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 545.832942] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 545.833327] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b99915da-2d26-417a-bae4-d9cea4d58295 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.844832] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3ec52f-2c9d-49f6-a1ed-add689539148 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.871428] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1784917b-8a7e-4974-b8b3-f8f2b3db019a could not be found. [ 545.871768] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 545.871989] env[61273]: INFO nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 545.872274] env[61273]: DEBUG oslo.service.loopingcall [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 545.872529] env[61273]: DEBUG nova.compute.manager [-] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 545.872648] env[61273]: DEBUG nova.network.neutron [-] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 545.920858] env[61273]: DEBUG nova.network.neutron [-] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.921249] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a2c696f678154905aaf0050b783164eb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 545.928709] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2c696f678154905aaf0050b783164eb [ 546.002047] env[61273]: DEBUG oslo_concurrency.lockutils [None req-41238e4a-b036-49d6-8907-007255bd521e tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "986f91ac-e44f-474a-885a-bc097b396019" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.966s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.006170] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg cdf02d83a7d44772918d77c5d0053a72 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 546.009324] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "986f91ac-e44f-474a-885a-bc097b396019" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.426s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.012896] env[61273]: INFO nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 986f91ac-e44f-474a-885a-bc097b396019] During sync_power_state the instance has a pending task (spawning). Skip. [ 546.013307] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "986f91ac-e44f-474a-885a-bc097b396019" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.004s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.019952] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdf02d83a7d44772918d77c5d0053a72 [ 546.026013] env[61273]: DEBUG oslo_vmware.rw_handles [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Completed reading data from the image iterator. {{(pid=61273) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 546.026209] env[61273]: DEBUG oslo_vmware.rw_handles [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61273) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 546.088465] env[61273]: DEBUG nova.virt.vmwareapi.images [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Downloaded image file data 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b to vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk on the data store datastore2 {{(pid=61273) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 546.091756] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Caching image {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 546.092220] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Copying Virtual Disk [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk to [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 546.092567] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7a2bbd4-83c2-434c-acba-4367f6cf302c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.103553] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 546.103553] env[61273]: value = "task-375230" [ 546.103553] env[61273]: _type = "Task" [ 546.103553] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.116582] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375230, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.187036] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Successfully created port: f8f7383d-3e1b-4747-82b6-44e40294ada9 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.200690] env[61273]: DEBUG nova.scheduler.client.report [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 546.203317] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 53cdef9a93ed44d4bc08d037cf22c328 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 546.215107] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53cdef9a93ed44d4bc08d037cf22c328 [ 546.224067] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 9ec43493c55d425ba25e4c8e6400e904 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 546.274102] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ec43493c55d425ba25e4c8e6400e904 [ 546.423549] env[61273]: DEBUG nova.network.neutron [-] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.424065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg afd03a26b6f641fa8203fcff3fa7d398 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 546.432872] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afd03a26b6f641fa8203fcff3fa7d398 [ 546.510834] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 546.512502] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 67573e852d8a409094a72d8fcc74dd61 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 546.562629] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67573e852d8a409094a72d8fcc74dd61 [ 546.613944] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375230, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.714058] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.022s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.714755] env[61273]: ERROR nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658, please check neutron logs for more information. 
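Both build failures in this section share one root cause: Neutron accepts the port, but the binding step fails, and Nova's port-update path converts that into PortBindingFailed before the build is eventually re-scheduled (see the re-schedule record for instance 020a5b3a further down). A minimal, hedged sketch of that conversion follows; the 'binding:vif_type' check and the local exception class are assumptions standing in for the real helpers in nova/network/neutron.py and nova/exception.py.

# Hedged sketch, not Nova's actual code: how a failed Neutron binding
# becomes the PortBindingFailed seen in the tracebacks in this log.
class PortBindingFailed(Exception):          # stand-in for nova.exception.PortBindingFailed
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

VIF_TYPE_BINDING_FAILED = 'binding_failed'   # assumed marker Neutron sets on the port

def ensure_no_port_binding_failure(port):
    # After updating a port, inspect the binding result Neutron reported.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Example using the port id from the record above:
try:
    ensure_no_port_binding_failure({'id': '4e2bb2c8-5529-4001-9487-43fbe76c2658',
                                    'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)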
[ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Traceback (most recent call last): [ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self.driver.spawn(context, instance, image_meta, [ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self._vmops.spawn(context, instance, image_meta, injected_files, [ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] vm_ref = self.build_virtual_machine(instance, [ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] vif_infos = vmwarevif.get_vif_info(self._session, [ 546.714755] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] for vif in network_info: [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] return self._sync_wrapper(fn, *args, **kwargs) [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self.wait() [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self[:] = self._gt.wait() [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] return self._exit_event.wait() [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] result = hub.switch() [ 546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
546.715113] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] return self.greenlet.switch() [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] result = function(*args, **kwargs) [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] return func(*args, **kwargs) [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] raise e [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] nwinfo = self.network_api.allocate_for_instance( [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] created_port_ids = self._update_ports_for_instance( [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] with excutils.save_and_reraise_exception(): [ 546.715449] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] self.force_reraise() [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] raise self.value [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] updated_port = self._update_port( [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] _ensure_no_port_binding_failure(port) [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] raise exception.PortBindingFailed(port_id=port['id']) [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] nova.exception.PortBindingFailed: Binding failed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658, please check neutron logs for more information. [ 546.715775] env[61273]: ERROR nova.compute.manager [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] [ 546.716130] env[61273]: DEBUG nova.compute.utils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Binding failed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 546.716831] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.629s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.717001] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.717149] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61273) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 546.717440] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.350s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.719337] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 27aa3f7a8a0548a4803f1103a4a337b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 546.720959] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Build of instance 020a5b3a-bda7-4a8a-9dad-948cee5a7373 was re-scheduled: Binding failed for port 4e2bb2c8-5529-4001-9487-43fbe76c2658, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 546.721412] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 546.721731] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Acquiring lock "refresh_cache-020a5b3a-bda7-4a8a-9dad-948cee5a7373" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.721835] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Acquired lock "refresh_cache-020a5b3a-bda7-4a8a-9dad-948cee5a7373" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.721990] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 546.722387] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg eb302bebb68f494da4453fb941e37d8d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 546.724143] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a67fdb-b0ee-492c-81e0-f7be5cea8954 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.728311] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 546.729992] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb302bebb68f494da4453fb941e37d8d [ 546.740930] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86ca782-da38-464a-8ab4-678127f6503f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.769550] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36de841e-3d09-4354-b46a-33d44ce8086c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.783289] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.783961] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.784347] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.784703] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.785032] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.785348] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.785715] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 
tempest-MigrationsAdminTest-1696452469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.786158] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.786538] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.786919] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.787302] env[61273]: DEBUG nova.virt.hardware [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.790493] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976d36b3-b2d4-40ae-9870-98376fff8ef6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.799045] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27aa3f7a8a0548a4803f1103a4a337b1 [ 546.808428] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9292e7-b975-4505-8eaf-d3b7ce6ec838 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.814404] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031f7e85-e854-4e94-bf7a-2e4d67fd91db {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.866131] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181735MB free_disk=141GB free_vcpus=48 pci_devices=None {{(pid=61273) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 546.866437] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.930596] env[61273]: INFO nova.compute.manager [-] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Took 1.06 seconds to deallocate network for instance. 
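The resource tracker records just above (the repeated 'Inventory has not changed' entries for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb and the hypervisor view free_ram=181735MB / free_disk=141GB / free_vcpus=48) are what the scheduler and Placement work from. A small sketch of the arithmetic, assuming Placement's usual capacity formula capacity = (total - reserved) * allocation_ratio with max_unit capping any single allocation, shows why 48 physical vCPUs can back up to 192 allocated ones:

# Hedged sketch of the capacity arithmetic implied by the inventory records
# above; the formula is an assumption about how Placement treats these fields.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 141},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(f"{rc}: schedulable capacity {capacity}, "
          f"largest single allocation {inv['max_unit']}")

# Expected output:
# VCPU: schedulable capacity 192, largest single allocation 16
# MEMORY_MB: schedulable capacity 196078, largest single allocation 65530
# DISK_GB: schedulable capacity 400, largest single allocation 141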
[ 546.933995] env[61273]: DEBUG nova.compute.claims [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 546.934166] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.941389] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.942299] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.034335] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.115309] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.702439} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.115576] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Copied Virtual Disk [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk to [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 547.116071] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleting the datastore file [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/tmp-sparse.vmdk {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 547.116071] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b066dfe-6706-4f63-9213-d0565859137c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.126870] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 547.126870] env[61273]: value = "task-375231" [ 547.126870] env[61273]: _type = "Task" [ 547.126870] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.139327] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375231, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.267164] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.415893] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.416446] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 3ad57c605cc043f9bc7d4d82cc178761 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 547.425848] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ad57c605cc043f9bc7d4d82cc178761 [ 547.526784] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c74806d-fd26-4149-ae98-f313dad8d4d3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.533978] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc377aa-19fb-4e42-850a-9f4ed55d027f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.568592] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf59f10-b36b-44ba-b4bf-4abe3c23674c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.575488] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c874b74c-e478-4a9f-a7a4-dea9737ebb43 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.588685] env[61273]: DEBUG nova.compute.provider_tree [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.589222] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg f071eb6591e44daca2b1e343bf7d55b5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 547.596648] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f071eb6591e44daca2b1e343bf7d55b5 [ 547.640202] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024186} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.640347] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 547.640588] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Moving file from [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b to [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b. {{(pid=61273) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 547.640906] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-47e50f1f-6a24-4e7e-815c-040cd50d233a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.652803] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 547.652803] env[61273]: value = "task-375232" [ 547.652803] env[61273]: _type = "Task" [ 547.652803] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.664602] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375232, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.862810] env[61273]: DEBUG nova.compute.manager [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Received event network-changed-14197306-66ad-4077-914c-b79fd0a658db {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 547.862999] env[61273]: DEBUG nova.compute.manager [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Refreshing instance network info cache due to event network-changed-14197306-66ad-4077-914c-b79fd0a658db. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 547.863283] env[61273]: DEBUG oslo_concurrency.lockutils [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] Acquiring lock "refresh_cache-1784917b-8a7e-4974-b8b3-f8f2b3db019a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.863389] env[61273]: DEBUG oslo_concurrency.lockutils [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] Acquired lock "refresh_cache-1784917b-8a7e-4974-b8b3-f8f2b3db019a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.863575] env[61273]: DEBUG nova.network.neutron [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Refreshing network info cache for port 14197306-66ad-4077-914c-b79fd0a658db {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 547.865875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] Expecting reply to msg 1b97ed50ac2b4d25bf1cb77a98c09b4c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 547.882358] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b97ed50ac2b4d25bf1cb77a98c09b4c [ 547.919060] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Releasing lock "refresh_cache-020a5b3a-bda7-4a8a-9dad-948cee5a7373" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.919309] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 547.919492] env[61273]: DEBUG nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 547.919708] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 547.944645] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.945243] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 21ac02637834464eb8346dd5e244557e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 547.954856] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21ac02637834464eb8346dd5e244557e [ 548.091504] env[61273]: DEBUG nova.scheduler.client.report [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 548.093992] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 69d8a86ff8dd4c0c86e838ce413906ee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 548.116979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69d8a86ff8dd4c0c86e838ce413906ee [ 548.162045] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375232, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.023752} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.162302] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] File moved {{(pid=61273) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 548.162506] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Cleaning up location [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 548.162664] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleting the datastore file [datastore2] vmware_temp/f15c1746-9ed8-455a-833e-dbb4c26f39ad {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 548.162905] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ad2eb0d-6582-436a-b7b2-cfbf3c01148e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.170277] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 548.170277] env[61273]: value = "task-375233" [ 548.170277] env[61273]: _type = "Task" [ 548.170277] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.176600] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375233, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.313380] env[61273]: ERROR nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f8f7383d-3e1b-4747-82b6-44e40294ada9, please check neutron logs for more information. 
[ 548.313380] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 548.313380] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.313380] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 548.313380] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.313380] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 548.313380] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.313380] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 548.313380] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.313380] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 548.313380] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.313380] env[61273]: ERROR nova.compute.manager raise self.value [ 548.313380] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.313380] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 548.313380] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.313380] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 548.314038] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.314038] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 548.314038] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f8f7383d-3e1b-4747-82b6-44e40294ada9, please check neutron logs for more information. 
[ 548.314038] env[61273]: ERROR nova.compute.manager [ 548.314038] env[61273]: Traceback (most recent call last): [ 548.314038] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 548.314038] env[61273]: listener.cb(fileno) [ 548.314038] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.314038] env[61273]: result = function(*args, **kwargs) [ 548.314038] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.314038] env[61273]: return func(*args, **kwargs) [ 548.314038] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.314038] env[61273]: raise e [ 548.314038] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.314038] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 548.314038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.314038] env[61273]: created_port_ids = self._update_ports_for_instance( [ 548.314038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.314038] env[61273]: with excutils.save_and_reraise_exception(): [ 548.314038] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.314038] env[61273]: self.force_reraise() [ 548.314038] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.314038] env[61273]: raise self.value [ 548.314038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.314038] env[61273]: updated_port = self._update_port( [ 548.314038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.314038] env[61273]: _ensure_no_port_binding_failure(port) [ 548.314038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.314038] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 548.314879] env[61273]: nova.exception.PortBindingFailed: Binding failed for port f8f7383d-3e1b-4747-82b6-44e40294ada9, please check neutron logs for more information. [ 548.314879] env[61273]: Removing descriptor: 15 [ 548.314879] env[61273]: ERROR nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f8f7383d-3e1b-4747-82b6-44e40294ada9, please check neutron logs for more information. 
[ 548.314879] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] Traceback (most recent call last): [ 548.314879] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 548.314879] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] yield resources [ 548.314879] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 548.314879] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self.driver.spawn(context, instance, image_meta, [ 548.314879] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 548.314879] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.314879] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.314879] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] vm_ref = self.build_virtual_machine(instance, [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] for vif in network_info: [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] return self._sync_wrapper(fn, *args, **kwargs) [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self.wait() [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self[:] = self._gt.wait() [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] return self._exit_event.wait() [ 548.315291] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.315657] env[61273]: ERROR 
nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] result = hub.switch() [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] return self.greenlet.switch() [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] result = function(*args, **kwargs) [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] return func(*args, **kwargs) [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] raise e [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] nwinfo = self.network_api.allocate_for_instance( [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.315657] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] created_port_ids = self._update_ports_for_instance( [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] with excutils.save_and_reraise_exception(): [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self.force_reraise() [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] raise self.value [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] updated_port = self._update_port( [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.315987] 
env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] _ensure_no_port_binding_failure(port) [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.315987] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] raise exception.PortBindingFailed(port_id=port['id']) [ 548.316363] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] nova.exception.PortBindingFailed: Binding failed for port f8f7383d-3e1b-4747-82b6-44e40294ada9, please check neutron logs for more information. [ 548.316363] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] [ 548.316363] env[61273]: INFO nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Terminating instance [ 548.316849] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "refresh_cache-228821ca-e981-405b-8952-8a1718103d3c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.316937] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquired lock "refresh_cache-228821ca-e981-405b-8952-8a1718103d3c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.317103] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 548.317530] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 655fb75301f24286a9a7b41a3518f285 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 548.324857] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 655fb75301f24286a9a7b41a3518f285 [ 548.391179] env[61273]: DEBUG nova.network.neutron [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.441343] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Acquiring lock "1804f229-97b9-4ee3-933d-715431a900f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.441343] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Lock "1804f229-97b9-4ee3-933d-715431a900f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.447664] env[61273]: DEBUG nova.network.neutron [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.447953] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg b6bc17fc07da472191285067131ea991 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 548.457359] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6bc17fc07da472191285067131ea991 [ 548.574242] env[61273]: DEBUG nova.network.neutron [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.574739] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] Expecting reply to msg 12a56376f81e4e2bafd52f4618ca20c1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 548.585287] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12a56376f81e4e2bafd52f4618ca20c1 [ 548.601282] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.883s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.601501] env[61273]: ERROR nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 86deca19-3809-4ccf-a3db-0d79d610d6c5, please check neutron logs for more information. 
[ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Traceback (most recent call last): [ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self.driver.spawn(context, instance, image_meta, [ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] vm_ref = self.build_virtual_machine(instance, [ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.601501] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] for vif in network_info: [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] return self._sync_wrapper(fn, *args, **kwargs) [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self.wait() [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self[:] = self._gt.wait() [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] return self._exit_event.wait() [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] result = hub.switch() [ 548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
548.602202] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] return self.greenlet.switch() [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] result = function(*args, **kwargs) [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] return func(*args, **kwargs) [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] raise e [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] nwinfo = self.network_api.allocate_for_instance( [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] created_port_ids = self._update_ports_for_instance( [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] with excutils.save_and_reraise_exception(): [ 548.602650] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] self.force_reraise() [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] raise self.value [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] updated_port = self._update_port( [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] _ensure_no_port_binding_failure(port) [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] raise exception.PortBindingFailed(port_id=port['id']) [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] nova.exception.PortBindingFailed: Binding failed for port 86deca19-3809-4ccf-a3db-0d79d610d6c5, please check neutron logs for more information. [ 548.603022] env[61273]: ERROR nova.compute.manager [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] [ 548.603506] env[61273]: DEBUG nova.compute.utils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Binding failed for port 86deca19-3809-4ccf-a3db-0d79d610d6c5, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 548.603506] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.370s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.605317] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 898f3bbb32af4f28b08c3973e2791345 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 548.607027] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Build of instance 8f2cba43-bdec-4455-b795-784b29e2ea5d was re-scheduled: Binding failed for port 86deca19-3809-4ccf-a3db-0d79d610d6c5, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 548.607461] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 548.607741] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Acquiring lock "refresh_cache-8f2cba43-bdec-4455-b795-784b29e2ea5d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.607902] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Acquired lock "refresh_cache-8f2cba43-bdec-4455-b795-784b29e2ea5d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.608076] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 548.608457] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg b1376928f2c047e298df4eabe1fcdd2f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 548.624077] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1376928f2c047e298df4eabe1fcdd2f [ 548.667524] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898f3bbb32af4f28b08c3973e2791345 [ 548.680527] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375233, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022858} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.680810] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 548.681604] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71b9ed2a-c1f2-4ac6-ac46-85edff3be5ed {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.695534] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 548.695534] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52e6b550-f242-004f-92c9-a3b9007878cf" [ 548.695534] env[61273]: _type = "Task" [ 548.695534] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.710937] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52e6b550-f242-004f-92c9-a3b9007878cf, 'name': SearchDatastore_Task, 'duration_secs': 0.008736} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.710937] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.710937] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 548.710937] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4c44743-9397-4035-93b4-693ab77f8125 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.718462] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 548.718462] env[61273]: value = "task-375234" [ 548.718462] env[61273]: _type = "Task" [ 548.718462] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.728676] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375234, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.743615] env[61273]: DEBUG nova.compute.manager [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Received event network-changed-f8f7383d-3e1b-4747-82b6-44e40294ada9 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 548.743615] env[61273]: DEBUG nova.compute.manager [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Refreshing instance network info cache due to event network-changed-f8f7383d-3e1b-4747-82b6-44e40294ada9. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 548.743703] env[61273]: DEBUG oslo_concurrency.lockutils [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] Acquiring lock "refresh_cache-228821ca-e981-405b-8952-8a1718103d3c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.855226] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.950168] env[61273]: INFO nova.compute.manager [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] Took 1.03 seconds to deallocate network for instance. 
[ 548.953484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg ac7e2dbd6f8b43539b7105813083f052 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 549.004390] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac7e2dbd6f8b43539b7105813083f052 [ 549.023679] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.024254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg b3e2e93bd4e2496a83e2d5c12c8c17f0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 549.035997] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3e2e93bd4e2496a83e2d5c12c8c17f0 [ 549.077346] env[61273]: DEBUG oslo_concurrency.lockutils [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] Releasing lock "refresh_cache-1784917b-8a7e-4974-b8b3-f8f2b3db019a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.077763] env[61273]: DEBUG nova.compute.manager [req-cdd9a2fd-ae3f-482d-b598-c120add357e9 req-23b6b77b-4fd0-4708-a6fd-b006fe3e12df service nova] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Received event network-vif-deleted-14197306-66ad-4077-914c-b79fd0a658db {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 549.130872] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.186012] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "017b1da4-7c9b-477d-92a3-29b2248317d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.186355] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "017b1da4-7c9b-477d-92a3-29b2248317d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.233944] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375234, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.251258] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.251879] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 03fee10cfbe34124a0e6ef45c03fc380 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 549.268759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03fee10cfbe34124a0e6ef45c03fc380 [ 549.449487] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91d4491-c087-4482-9d9d-287f9d21cee6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.460622] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg ad99f41852b24d1f965862fa1bb39ea5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 549.461749] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ecedcc-8497-46e6-ba05-ff5769ea364c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.496089] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d089f652-2e32-4aaa-9019-a0adc29e3a4b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.503789] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2be67d-4790-4be8-98c3-c4beb8cda032 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.518781] env[61273]: DEBUG nova.compute.provider_tree [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.519306] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 13e56b8f4a1e46ba9acdf26cb89ead2f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 549.523762] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad99f41852b24d1f965862fa1bb39ea5 [ 549.526889] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Releasing lock "refresh_cache-228821ca-e981-405b-8952-8a1718103d3c" {{(pid=61273) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.527876] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 549.528316] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 549.528888] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13e56b8f4a1e46ba9acdf26cb89ead2f [ 549.529235] env[61273]: DEBUG oslo_concurrency.lockutils [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] Acquired lock "refresh_cache-228821ca-e981-405b-8952-8a1718103d3c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.529442] env[61273]: DEBUG nova.network.neutron [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Refreshing network info cache for port f8f7383d-3e1b-4747-82b6-44e40294ada9 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 549.529980] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] Expecting reply to msg 4adc8d35c35142e3be6246c8623eb686 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 549.530816] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f2490603-1d16-49f2-bbae-f35d919dd93e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.536963] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4adc8d35c35142e3be6246c8623eb686 [ 549.540815] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b014f004-213c-4b5f-83c8-a622e30c4811 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.562687] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 228821ca-e981-405b-8952-8a1718103d3c could not be found. 
[ 549.562910] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 549.563083] env[61273]: INFO nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 549.563314] env[61273]: DEBUG oslo.service.loopingcall [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.563645] env[61273]: DEBUG nova.compute.manager [-] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 549.563645] env[61273]: DEBUG nova.network.neutron [-] [instance: 228821ca-e981-405b-8952-8a1718103d3c] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 549.583388] env[61273]: DEBUG nova.network.neutron [-] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.583920] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 54a1085a4d4249688ba708849f5e1d8a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 549.591650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54a1085a4d4249688ba708849f5e1d8a [ 549.729964] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375234, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581692} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.730333] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 549.730638] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 549.730969] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74f28c4f-4e00-41f4-9d05-aecac5437d76 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.738374] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 549.738374] env[61273]: value = "task-375235" [ 549.738374] env[61273]: _type = "Task" [ 549.738374] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.746897] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375235, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.759786] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Releasing lock "refresh_cache-8f2cba43-bdec-4455-b795-784b29e2ea5d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.760246] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 549.760495] env[61273]: DEBUG nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 549.760754] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 549.804029] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.804429] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg d41a2cf1bcdc43ba97a822f68637eb3f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 549.813534] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d41a2cf1bcdc43ba97a822f68637eb3f [ 550.024752] env[61273]: INFO nova.scheduler.client.report [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Deleted allocations for instance 020a5b3a-bda7-4a8a-9dad-948cee5a7373 [ 550.032865] env[61273]: DEBUG nova.scheduler.client.report [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 550.032865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 712a0cddbe124eb189e60011d2c4f400 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 550.032865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Expecting reply to msg 048bf9f2dd75487d9c2eaeb8f74847d7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 550.052304] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 712a0cddbe124eb189e60011d2c4f400 [ 550.064385] env[61273]: DEBUG 
nova.network.neutron [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.072785] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 048bf9f2dd75487d9c2eaeb8f74847d7 [ 550.091814] env[61273]: DEBUG nova.network.neutron [-] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.093729] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f0726f6c3fea4a7d83087d9d0aacace3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 550.112446] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0726f6c3fea4a7d83087d9d0aacace3 [ 550.205722] env[61273]: DEBUG nova.network.neutron [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.206228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] Expecting reply to msg 775ade2f31b143f788157d34d7a5925a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 550.222225] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 775ade2f31b143f788157d34d7a5925a [ 550.250245] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375235, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066686} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.250515] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 550.251416] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e0cc6d-7b6c-45ca-ae3e-5fb7bae32115 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.279934] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 550.280282] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70ec7ea0-1863-4797-bf45-e17a93c4768f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.307796] env[61273]: DEBUG nova.network.neutron [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.308536] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 2e3f2778880f4bc3b3ae8ed8a1f16205 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 550.310097] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 550.310097] env[61273]: value = "task-375236" [ 550.310097] env[61273]: _type = "Task" [ 550.310097] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.317393] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e3f2778880f4bc3b3ae8ed8a1f16205 [ 550.322454] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375236, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.332638] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Acquiring lock "32b57d1b-d35f-488e-be23-9119f2f56562" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.332863] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Lock "32b57d1b-d35f-488e-be23-9119f2f56562" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.537249] env[61273]: DEBUG oslo_concurrency.lockutils [None req-71c04b5d-a508-4ca5-84a2-1b16a0081f0d tempest-ServerDiagnosticsNegativeTest-495393484 tempest-ServerDiagnosticsNegativeTest-495393484-project-member] Lock "020a5b3a-bda7-4a8a-9dad-948cee5a7373" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.407s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.537913] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg c939401a33ea47cbae4534b42f5c8a3c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 550.538783] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "020a5b3a-bda7-4a8a-9dad-948cee5a7373" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 16.956s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.538958] env[61273]: INFO nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 020a5b3a-bda7-4a8a-9dad-948cee5a7373] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 550.539164] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "020a5b3a-bda7-4a8a-9dad-948cee5a7373" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.540068] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.937s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.540691] env[61273]: ERROR nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2a66073a-39ef-4960-8443-ca3964c62be0, please check neutron logs for more information. [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Traceback (most recent call last): [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self.driver.spawn(context, instance, image_meta, [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] vm_ref = self.build_virtual_machine(instance, [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.540691] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] for vif in network_info: [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] return self._sync_wrapper(fn, *args, **kwargs) [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self.wait() [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self[:] = self._gt.wait() [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] return self._exit_event.wait() [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] result = hub.switch() [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 550.541075] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] return self.greenlet.switch() [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] result = function(*args, **kwargs) [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] return func(*args, **kwargs) [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] raise e [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] nwinfo = self.network_api.allocate_for_instance( [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] created_port_ids = self._update_ports_for_instance( [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] with excutils.save_and_reraise_exception(): [ 550.541515] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] self.force_reraise() [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] raise self.value [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] updated_port = self._update_port( [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] _ensure_no_port_binding_failure(port) [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] raise exception.PortBindingFailed(port_id=port['id']) [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] nova.exception.PortBindingFailed: Binding failed for port 2a66073a-39ef-4960-8443-ca3964c62be0, please check neutron logs for more information. [ 550.541902] env[61273]: ERROR nova.compute.manager [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] [ 550.542220] env[61273]: DEBUG nova.compute.utils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Binding failed for port 2a66073a-39ef-4960-8443-ca3964c62be0, please check neutron logs for more information. 
{{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 550.542722] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.428s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.544214] env[61273]: INFO nova.compute.claims [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.545823] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 4f01d706ff514ec6990e92585438eb7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 550.547008] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Build of instance ea496eae-68f9-43f1-b4cf-6743043c753b was re-scheduled: Binding failed for port 2a66073a-39ef-4960-8443-ca3964c62be0, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 550.547573] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 550.547750] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Acquiring lock "refresh_cache-ea496eae-68f9-43f1-b4cf-6743043c753b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.547899] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Acquired lock "refresh_cache-ea496eae-68f9-43f1-b4cf-6743043c753b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.548149] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 550.548545] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 
tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 8234b402094a4d7fb8c558f00c09453d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 550.552669] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c939401a33ea47cbae4534b42f5c8a3c [ 550.556263] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8234b402094a4d7fb8c558f00c09453d [ 550.599915] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f01d706ff514ec6990e92585438eb7a [ 550.599915] env[61273]: INFO nova.compute.manager [-] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Took 1.04 seconds to deallocate network for instance. [ 550.603033] env[61273]: DEBUG nova.compute.claims [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 550.603033] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.708562] env[61273]: DEBUG oslo_concurrency.lockutils [req-9fa61386-d3cf-4c4c-be07-4a4d1c88f5fa req-8fa4b568-d956-410c-b3b9-061265dc9b56 service nova] Releasing lock "refresh_cache-228821ca-e981-405b-8952-8a1718103d3c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.811095] env[61273]: INFO nova.compute.manager [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] Took 1.05 seconds to deallocate network for instance. [ 550.812922] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 2f91d58b75ac49dda970cd191b5d6e04 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 550.827034] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375236, 'name': ReconfigVM_Task, 'duration_secs': 0.255616} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.827034] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 550.827034] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfd5f65d-64b8-4d33-ab1c-9a5e742de4e7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.829709] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 550.829709] env[61273]: value = "task-375237" [ 550.829709] env[61273]: _type = "Task" [ 550.829709] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.837266] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375237, 'name': Rename_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.848448] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f91d58b75ac49dda970cd191b5d6e04 [ 551.040592] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 551.042384] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 3162bba69a584f06be545deb8cbf3d1d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 551.054084] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 4e3b02a206d647b399f04788acb33e5f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 551.063330] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e3b02a206d647b399f04788acb33e5f [ 551.079992] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3162bba69a584f06be545deb8cbf3d1d [ 551.097326] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.196299] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.196829] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg ad9af4c3fbb1487fbae5dd3a2427471d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 551.252369] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad9af4c3fbb1487fbae5dd3a2427471d [ 551.322537] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 0764679723d640bf8fa2667fd3071fc5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 551.340290] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375237, 'name': Rename_Task, 'duration_secs': 0.129082} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.340581] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 551.340908] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e41a2d08-32d4-4434-88d9-bde93f1b84ff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.349991] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 551.349991] env[61273]: value = "task-375238" [ 551.349991] env[61273]: _type = "Task" [ 551.349991] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.366254] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375238, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.366793] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0764679723d640bf8fa2667fd3071fc5 [ 551.578308] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.702245] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Releasing lock "refresh_cache-ea496eae-68f9-43f1-b4cf-6743043c753b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.702245] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 551.702245] env[61273]: DEBUG nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 551.702245] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 551.732170] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.732170] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 547a8604960c4af68131b6b86adc808f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 551.739853] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 547a8604960c4af68131b6b86adc808f [ 551.863254] env[61273]: DEBUG oslo_vmware.api [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375238, 'name': PowerOnVM_Task, 'duration_secs': 0.468028} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.864522] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 551.864851] env[61273]: DEBUG nova.compute.manager [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 551.866747] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00596a7d-8467-4b36-8142-26a60a2f5d64 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.883456] env[61273]: INFO nova.scheduler.client.report [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Deleted allocations for instance 8f2cba43-bdec-4455-b795-784b29e2ea5d [ 551.891702] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 4bfc52963a9d441084314cb7d2bd3457 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 551.892968] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Expecting reply to msg 5ea2f81012a94ed6b37adcc1a750f9c6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 551.927257] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ea2f81012a94ed6b37adcc1a750f9c6 [ 551.932593] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68a5dab-ec7e-4271-a863-0f9de85c1e19 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.946896] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e770a3c-a3eb-4da1-8cbe-02388fd8ed9f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.955300] env[61273]: DEBUG nova.compute.manager [req-72e62fb4-b30c-4196-9a6f-4dfcd6ab3953 req-c7f0093d-92be-425e-8037-8dae9f5b0937 service nova] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Received event network-vif-deleted-f8f7383d-3e1b-4747-82b6-44e40294ada9 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 551.992532] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08d226e-065a-47d6-a75e-4738fd76163e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.005399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bfc52963a9d441084314cb7d2bd3457 [ 552.011121] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5d5989ca-7b76-4c17-a578-d4522c5e7d89 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.027166] env[61273]: DEBUG nova.compute.provider_tree [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.027474] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg e39e3556e402458894de77e5519cb06f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 552.043773] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e39e3556e402458894de77e5519cb06f [ 552.234756] env[61273]: DEBUG nova.network.neutron [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.235294] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 178fc62d12b646798d915e20496880e7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 552.248104] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 178fc62d12b646798d915e20496880e7 [ 552.402617] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0043e483-94cf-44cf-8fc5-41e87170cd2b tempest-ImagesNegativeTestJSON-984867956 tempest-ImagesNegativeTestJSON-984867956-project-member] Lock "8f2cba43-bdec-4455-b795-784b29e2ea5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.960s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.403207] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 0e631bd170754b909109283861bcadb0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 552.403966] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "8f2cba43-bdec-4455-b795-784b29e2ea5d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 18.820s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.404178] env[61273]: INFO nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 8f2cba43-bdec-4455-b795-784b29e2ea5d] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 552.404350] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "8f2cba43-bdec-4455-b795-784b29e2ea5d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.406375] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.419674] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e631bd170754b909109283861bcadb0 [ 552.534708] env[61273]: DEBUG nova.scheduler.client.report [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 552.534708] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 8bb49801b56f46fbbdb5a1a702f8982c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 552.553273] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bb49801b56f46fbbdb5a1a702f8982c [ 552.655790] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "d4d3db12-8de6-4daf-a087-89bb043d1217" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.657052] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "d4d3db12-8de6-4daf-a087-89bb043d1217" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.738664] env[61273]: INFO nova.compute.manager [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] Took 1.04 seconds to deallocate network for instance. 
[ 552.740523] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg b4db86624c064e02a76f4ccc8696a1db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 552.823523] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4db86624c064e02a76f4ccc8696a1db [ 552.905705] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 552.907512] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg f0e72c945c6a41adb8b36d9130dbc5f3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 552.953124] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0e72c945c6a41adb8b36d9130dbc5f3 [ 553.037748] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.038273] env[61273]: DEBUG nova.compute.manager [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 553.039954] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 3ba5d7b1b7f74b3787a47677ea74e52f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 553.040999] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.572s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.042726] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 9895e5919dd14cefa6330446fdc60dde in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 553.081472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ba5d7b1b7f74b3787a47677ea74e52f [ 553.087697] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9895e5919dd14cefa6330446fdc60dde [ 553.248118] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 05d31ebf4dc34e99bf09a9dd50e13923 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 553.287442] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05d31ebf4dc34e99bf09a9dd50e13923 [ 553.431826] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.546493] env[61273]: DEBUG nova.compute.utils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 553.547150] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg d15b5b51cc5a4f9d9a79f79d8e85aeff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 553.548148] env[61273]: DEBUG nova.compute.manager [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Not allocating networking since 'none' was specified. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 553.564184] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d15b5b51cc5a4f9d9a79f79d8e85aeff [ 553.741005] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Acquiring lock "13c1d417-4087-46ad-b513-fc3317995d18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.744986] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Lock "13c1d417-4087-46ad-b513-fc3317995d18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.781270] env[61273]: INFO nova.scheduler.client.report [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Deleted allocations for instance ea496eae-68f9-43f1-b4cf-6743043c753b [ 553.787329] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Expecting reply to msg 84d472dd9c164bb2a9b7ebf40d37809f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 553.826417] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84d472dd9c164bb2a9b7ebf40d37809f [ 553.901247] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153b1402-e5ce-4a0b-8a0c-8b4d87a32250 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.908931] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f184660-4599-40b0-acb7-f954a314f860 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.952165] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220c973c-89ae-4b43-93f0-ccee0d318307 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.960837] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a20141-b864-4ebc-8227-527450b2c87b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.976253] env[61273]: DEBUG nova.compute.provider_tree [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 553.976805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 
tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg b10b22cac70e4e77b00ff637f5d1046a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 553.985115] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b10b22cac70e4e77b00ff637f5d1046a [ 554.050201] env[61273]: DEBUG nova.compute.manager [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 554.052407] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg a64cdd194a1e4cc888315bb7acdf8a8e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 554.094541] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a64cdd194a1e4cc888315bb7acdf8a8e [ 554.289804] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ade7888a-18d7-44c7-9f8d-23d1865c16ae tempest-FloatingIPsAssociationNegativeTestJSON-1245561580 tempest-FloatingIPsAssociationNegativeTestJSON-1245561580-project-member] Lock "ea496eae-68f9-43f1-b4cf-6743043c753b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.803s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.290603] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg c6e4ce960d0246d69007e7f3a1ce31c0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 554.291487] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "ea496eae-68f9-43f1-b4cf-6743043c753b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 20.707s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.291706] env[61273]: INFO nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: ea496eae-68f9-43f1-b4cf-6743043c753b] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 554.291891] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "ea496eae-68f9-43f1-b4cf-6743043c753b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.303179] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6e4ce960d0246d69007e7f3a1ce31c0 [ 554.479706] env[61273]: DEBUG nova.scheduler.client.report [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 554.482164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg a0c02206fdc54724a2ea5d00d649ebd3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 554.500826] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0c02206fdc54724a2ea5d00d649ebd3 [ 554.556832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg b8a44216e94c4875a4b266e532a7b06b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 554.618027] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8a44216e94c4875a4b266e532a7b06b [ 554.630118] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Acquiring lock "782dc85a-56f4-4f03-8711-b78bbadb33ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.630348] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Lock "782dc85a-56f4-4f03-8711-b78bbadb33ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.794136] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 554.795347] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg ae53fe00b19e4e70b4d0e37f5f4ccb19 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 554.840189] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae53fe00b19e4e70b4d0e37f5f4ccb19 [ 554.856769] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Expecting reply to msg c51ddf2642964df6aa549fed71cd453f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 554.874487] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c51ddf2642964df6aa549fed71cd453f [ 554.985103] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.944s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.985790] env[61273]: ERROR nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port caf04a4d-9a60-4997-9d4d-89dfc0470281, please check neutron logs for more information. 
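The traceback that follows bottoms out in Nova's `_ensure_no_port_binding_failure` helper raising `PortBindingFailed`. A simplified sketch of that kind of check is below; the class and function names are illustrative stand-ins, not the actual nova.exception / nova.network.neutron code.

```python
# Simplified sketch of the check the traceback below ends in: Neutron marks a
# port whose binding failed with binding:vif_type == 'binding_failed', and the
# caller converts that into an exception. Names are illustrative only.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    # 'binding_failed' is the vif_type Neutron reports when binding fails.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example with the port id from the log entry above:
port = {'id': 'caf04a4d-9a60-4997-9d4d-89dfc0470281',
        'binding:vif_type': 'binding_failed'}
ensure_no_port_binding_failure(port)  # raises PortBindingFailed
```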
[ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Traceback (most recent call last): [ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self.driver.spawn(context, instance, image_meta, [ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] vm_ref = self.build_virtual_machine(instance, [ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] vif_infos = vmwarevif.get_vif_info(self._session, [ 554.985790] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] for vif in network_info: [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] return self._sync_wrapper(fn, *args, **kwargs) [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self.wait() [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self[:] = self._gt.wait() [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] return self._exit_event.wait() [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] result = hub.switch() [ 554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
554.986447] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] return self.greenlet.switch() [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] result = function(*args, **kwargs) [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] return func(*args, **kwargs) [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] raise e [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] nwinfo = self.network_api.allocate_for_instance( [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] created_port_ids = self._update_ports_for_instance( [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] with excutils.save_and_reraise_exception(): [ 554.987092] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] self.force_reraise() [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] raise self.value [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] updated_port = self._update_port( [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] _ensure_no_port_binding_failure(port) [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] raise exception.PortBindingFailed(port_id=port['id']) [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] nova.exception.PortBindingFailed: Binding failed for port caf04a4d-9a60-4997-9d4d-89dfc0470281, please check neutron logs for more information. [ 554.987692] env[61273]: ERROR nova.compute.manager [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] [ 554.988379] env[61273]: DEBUG nova.compute.utils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Binding failed for port caf04a4d-9a60-4997-9d4d-89dfc0470281, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 554.988379] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.345s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.990046] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 54356719c20645b6b6999db428f241cf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 554.991490] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Build of instance f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5 was re-scheduled: Binding failed for port caf04a4d-9a60-4997-9d4d-89dfc0470281, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 554.992048] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 554.992790] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "refresh_cache-f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.992790] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquired lock "refresh_cache-f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.992790] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 554.992993] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 917ce0fa0f76434da64a0ce30d960fb8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 555.012670] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 917ce0fa0f76434da64a0ce30d960fb8 [ 555.037579] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54356719c20645b6b6999db428f241cf [ 555.063950] env[61273]: DEBUG nova.compute.manager [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 555.089235] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 555.090239] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 555.090723] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.091113] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 555.091398] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 555.091737] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 555.092115] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 555.092601] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 555.093074] 
env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 555.093382] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 555.093753] env[61273]: DEBUG nova.virt.hardware [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 555.094926] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7160e520-58b5-48ab-b1d4-f56bd0e3c19a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.108016] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a885a877-6295-4a8b-9853-37c0bba88eec {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.125357] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 555.131527] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Creating folder: Project (921b38590177446c941b2872e63cf976). Parent ref: group-v103328. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.132019] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-152521ee-d8e3-4bef-bb15-9c87eefbbd06 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.143596] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Created folder: Project (921b38590177446c941b2872e63cf976) in parent group-v103328. [ 555.144017] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Creating folder: Instances. Parent ref: group-v103333. 
{{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.144395] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf8f19d7-a4bd-4016-8156-28be849f67b3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.153276] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Created folder: Instances in parent group-v103333. [ 555.153713] env[61273]: DEBUG oslo.service.loopingcall [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.154040] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 555.154389] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43f20698-a883-424e-a5d4-ad6912518ef2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.178982] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 555.178982] env[61273]: value = "task-375241" [ 555.178982] env[61273]: _type = "Task" [ 555.178982] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.203074] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375241, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.315688] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.366323] env[61273]: INFO nova.compute.manager [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Rebuilding instance [ 555.420026] env[61273]: DEBUG nova.compute.manager [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 555.421304] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122022d3-5410-4536-b56f-c927eb6c50ef {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.431216] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Expecting reply to msg 3a1d3664ceb646d384803507308a77c4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 555.492055] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a1d3664ceb646d384803507308a77c4 [ 555.536902] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.690374] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375241, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.751069] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.751069] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg c2f3721e57634da58a74852d2bd6b454 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 555.758640] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2f3721e57634da58a74852d2bd6b454 [ 555.872349] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30faf7cf-3612-4ee3-aa25-504e520c04d8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.880437] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e152f56-4524-4261-83a7-3746d4855cd2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.913420] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662b530b-ca3c-4c0c-a69d-f216c732e5d8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.922054] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8ca930-57bc-41a3-8371-d3e99c198ba2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.936476] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 555.938380] env[61273]: DEBUG nova.compute.provider_tree [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.938540] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg a9c80f540cbf46afb1186b7f4bea6492 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 555.939429] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-057c8ca1-436b-471d-b06d-b05464474756 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.946162] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 
555.946162] env[61273]: value = "task-375242" [ 555.946162] env[61273]: _type = "Task" [ 555.946162] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.946800] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9c80f540cbf46afb1186b7f4bea6492 [ 555.959293] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375242, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.193558] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375241, 'name': CreateVM_Task} progress is 99%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.253076] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Releasing lock "refresh_cache-f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.253342] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 556.253515] env[61273]: DEBUG nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 556.253676] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 556.304517] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.305071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 771891f58b7542f995b78581862cd9db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 556.312547] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 771891f58b7542f995b78581862cd9db [ 556.443374] env[61273]: DEBUG nova.scheduler.client.report [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 556.445841] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg b3543c4b25a345ca90b8f721c639d551 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 556.457913] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375242, 'name': PowerOffVM_Task, 'duration_secs': 0.130866} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.458176] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 556.458448] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 556.459302] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78584101-f7db-4a9d-bb61-e462e72fed03 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.462145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3543c4b25a345ca90b8f721c639d551 [ 556.466874] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 556.467108] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6692bf50-4539-41b0-9ce4-af28345d4e41 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.490501] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 556.490501] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Deleting contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 556.490593] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Deleting the datastore file [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2 {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 556.491044] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cfdbe56-0174-4ebb-a801-48c0c122e6ce {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.499456] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 556.499456] env[61273]: value = "task-375244" [ 556.499456] env[61273]: _type = "Task" [ 556.499456] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.510397] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.691293] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375241, 'name': CreateVM_Task, 'duration_secs': 1.302308} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.691547] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 556.692469] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.692572] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.695415] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 556.696083] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e74ac2e-842f-48f2-84d5-a2ff76a78696 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.707616] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 556.707616] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]521ddc19-0dd9-175e-444d-de1250fa6b9b" [ 556.707616] env[61273]: _type = "Task" [ 556.707616] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.718142] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]521ddc19-0dd9-175e-444d-de1250fa6b9b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.807155] env[61273]: DEBUG nova.network.neutron [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.807639] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 7fe2e991c2be4ff787cb76a6ead3152f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 556.816874] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fe2e991c2be4ff787cb76a6ead3152f [ 556.948466] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.960s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.949018] env[61273]: ERROR nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737, please check neutron logs for more information. 
[ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Traceback (most recent call last): [ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self.driver.spawn(context, instance, image_meta, [ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] vm_ref = self.build_virtual_machine(instance, [ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] vif_infos = vmwarevif.get_vif_info(self._session, [ 556.949018] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] for vif in network_info: [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] return self._sync_wrapper(fn, *args, **kwargs) [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self.wait() [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self[:] = self._gt.wait() [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] return self._exit_event.wait() [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] result = hub.switch() [ 556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
556.949369] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] return self.greenlet.switch() [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] result = function(*args, **kwargs) [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] return func(*args, **kwargs) [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] raise e [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] nwinfo = self.network_api.allocate_for_instance( [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] created_port_ids = self._update_ports_for_instance( [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] with excutils.save_and_reraise_exception(): [ 556.949718] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] self.force_reraise() [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] raise self.value [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] updated_port = self._update_port( [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] _ensure_no_port_binding_failure(port) [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] raise exception.PortBindingFailed(port_id=port['id']) [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] nova.exception.PortBindingFailed: Binding failed for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737, please check neutron logs for more information. [ 556.950061] env[61273]: ERROR nova.compute.manager [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] [ 556.950407] env[61273]: DEBUG nova.compute.utils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Binding failed for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 556.951293] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.516s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.952867] env[61273]: INFO nova.compute.claims [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 556.955729] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg d3e0b4400b614cc5ba8d6f762395dd58 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 556.957081] env[61273]: DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Build of instance 40709d2b-cc33-4ac0-9a13-731442d7edff was re-scheduled: Binding failed for port 916f0a29-ac67-4ea8-b7f9-7d34e8b0b737, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 556.957754] env[61273]: DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 556.958188] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquiring lock "refresh_cache-40709d2b-cc33-4ac0-9a13-731442d7edff" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.958418] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Acquired lock "refresh_cache-40709d2b-cc33-4ac0-9a13-731442d7edff" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.958637] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 556.959097] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg ff331c75aa1a4f529b02b73bea55e21e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 556.968882] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff331c75aa1a4f529b02b73bea55e21e [ 557.014102] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090107} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.014375] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 557.014595] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Deleted contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 557.014847] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 557.016600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Expecting reply to msg e1b5f610673c402c9da02b665179cf7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 557.019412] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.027765] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3e0b4400b614cc5ba8d6f762395dd58 [ 557.063761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1b5f610673c402c9da02b665179cf7a [ 557.185008] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.185519] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 7025509b8daf4841b435ad4efdfa66c9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 557.194291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7025509b8daf4841b435ad4efdfa66c9 [ 557.223874] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]521ddc19-0dd9-175e-444d-de1250fa6b9b, 'name': SearchDatastore_Task, 'duration_secs': 0.010433} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.224336] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.224608] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.224897] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.225078] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.225261] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 557.225802] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b20f128-fd73-4723-936b-70101e31b925 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.234498] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 557.234693] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 557.235513] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79f632d9-d9b4-48ac-a142-832d2eacaa9b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.241878] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 557.241878] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]529e930e-dc17-b615-442e-320d92900ecd" [ 557.241878] env[61273]: _type = "Task" [ 557.241878] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.250481] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]529e930e-dc17-b615-442e-320d92900ecd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.310392] env[61273]: INFO nova.compute.manager [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] Took 1.06 seconds to deallocate network for instance. [ 557.313978] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg e2af707d9d4d40de9fdafe393edadb9b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 557.355601] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2af707d9d4d40de9fdafe393edadb9b [ 557.463492] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 036f93949417456a956ff9930d6ac90b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 557.471555] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 036f93949417456a956ff9930d6ac90b [ 557.521389] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Expecting reply to msg 7f8114abd9c64630896819205b422d20 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 557.559316] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f8114abd9c64630896819205b422d20 [ 557.687476] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Releasing lock "refresh_cache-40709d2b-cc33-4ac0-9a13-731442d7edff" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.687760] env[61273]: DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] 
Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 557.687951] env[61273]: DEBUG nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 557.688185] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 557.746197] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.746808] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg d9ef7c401d1d40b3b270bc576e8ddd08 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 557.760799] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]529e930e-dc17-b615-442e-320d92900ecd, 'name': SearchDatastore_Task, 'duration_secs': 0.008018} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.760799] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9ef7c401d1d40b3b270bc576e8ddd08 [ 557.760799] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67b6090e-91ea-4237-b68b-a9bc5164bd03 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.764762] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 557.764762] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52cca3ab-a098-c206-1ed3-b544a90690cd" [ 557.764762] env[61273]: _type = "Task" [ 557.764762] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.772457] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52cca3ab-a098-c206-1ed3-b544a90690cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.823341] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 0e4e1e60a4b14f1a88ad44ec52bb39c3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 557.928772] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e4e1e60a4b14f1a88ad44ec52bb39c3 [ 558.047751] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 558.048082] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 558.048279] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 558.048480] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 558.048627] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 558.048781] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 558.048958] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 558.049111] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 558.049270] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 558.049424] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 558.049629] env[61273]: DEBUG nova.virt.hardware [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 558.050555] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baec3a85-07e1-49b3-a7d9-7a2ef12e0f3e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.062433] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2c697d-2f82-4719-b6d8-899163543370 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.082063] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 558.087621] env[61273]: DEBUG oslo.service.loopingcall [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 558.090218] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 558.090593] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f198be36-bca6-4ce6-92bf-90aed853b333 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.122169] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 558.122169] env[61273]: value = "task-375245" [ 558.122169] env[61273]: _type = "Task" [ 558.122169] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.132549] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375245, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.252272] env[61273]: DEBUG nova.network.neutron [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.253097] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 1215f72b74bb4b18908fc042db6b177a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 558.267636] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1215f72b74bb4b18908fc042db6b177a [ 558.279147] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52cca3ab-a098-c206-1ed3-b544a90690cd, 'name': SearchDatastore_Task, 'duration_secs': 0.009039} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.279412] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.279766] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca/62c3b24d-bee7-4dd2-a6c7-9303c7c28cca.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 558.280237] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b14226aa-da7c-4907-841d-27672ae6abd5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.287328] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 558.287328] env[61273]: value = "task-375246" [ 558.287328] env[61273]: _type = "Task" [ 558.287328] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.298090] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375246, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.350376] env[61273]: INFO nova.scheduler.client.report [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Deleted allocations for instance f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5 [ 558.366940] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 30c8109b258a4a28a382672e7436213b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 558.382335] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30c8109b258a4a28a382672e7436213b [ 558.394097] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c698d7d-b1cb-47e3-9a3e-a6d114a8f4a5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.401977] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d94e68b-60d9-400f-8d76-2581592fa8a6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.435874] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8b91bc-c0ec-461b-9c8a-7f77880008ea {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.443389] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da22725-de36-49fd-b68c-2cc14f1f7031 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.466538] env[61273]: DEBUG nova.compute.provider_tree [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.467128] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 56b335bca47e43eab735fe950cb0362d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 558.475572] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56b335bca47e43eab735fe950cb0362d [ 558.646794] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375245, 'name': CreateVM_Task} progress is 99%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.756396] env[61273]: INFO nova.compute.manager [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] [instance: 40709d2b-cc33-4ac0-9a13-731442d7edff] Took 1.07 seconds to deallocate network for instance. 
[ 558.758157] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 5718164bc7744d3daa46b29363e68391 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 558.797343] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375246, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.826515] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5718164bc7744d3daa46b29363e68391 [ 558.874065] env[61273]: DEBUG oslo_concurrency.lockutils [None req-49624888-9b63-46d8-9568-cc2a60de73dc tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.424s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.874065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg a0ee7d67f999434e877ed3604e5a0a8f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 558.874065] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 25.287s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.874065] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ecb2a41-aea3-4a81-a22c-66a5793712b6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.880847] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff10120c-3ccf-4f92-816b-8243da663680 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.906331] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0ee7d67f999434e877ed3604e5a0a8f [ 558.920318] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg d49a4ae96a2a4b05952da55bc6c3014d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 558.972030] env[61273]: DEBUG nova.scheduler.client.report [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 558.972030] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 443dbb21bf7845aa8dcea3208439aeb2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 558.973960] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d49a4ae96a2a4b05952da55bc6c3014d [ 558.988969] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 443dbb21bf7845aa8dcea3208439aeb2 [ 559.139458] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375245, 'name': CreateVM_Task} progress is 99%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.172805] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Acquiring lock "1336becb-9691-490c-86ea-3bc70d13d7df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.173041] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Lock "1336becb-9691-490c-86ea-3bc70d13d7df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.263770] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 06980e803db441fb866e246b30cec66a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 559.298512] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375246, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524767} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.298872] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca/62c3b24d-bee7-4dd2-a6c7-9303c7c28cca.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 559.299122] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 559.299413] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8b37061-68bd-43f9-831f-abf75e3d65de {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.309991] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 559.309991] env[61273]: value = "task-375247" [ 559.309991] env[61273]: _type = "Task" [ 559.309991] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.315021] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06980e803db441fb866e246b30cec66a [ 559.320135] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375247, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.374602] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 559.375791] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 3166c6fd8ab947199bd62f435a9e4a4d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 559.423278] env[61273]: INFO nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5] During the sync_power process the instance has moved from host None to host cpu-1 [ 559.423529] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "f58fbc9b-ba60-4c56-bbdf-fbaf45ec12b5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.552s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.451119] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3166c6fd8ab947199bd62f435a9e4a4d [ 559.481058] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.481719] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 559.483636] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg fda81a644e044eabab58e2b4493b8b4d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 559.484733] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 12.618s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.485512] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg dbf180d148494238af38213c2c9e1903 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 559.529989] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbf180d148494238af38213c2c9e1903 [ 559.534144] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fda81a644e044eabab58e2b4493b8b4d [ 559.640295] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375245, 'name': CreateVM_Task, 'duration_secs': 1.335729} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.640537] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 559.641018] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.641251] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.641646] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 559.641962] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3757b1e-f136-4666-9691-c92f8086f0a4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.647550] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 559.647550] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52b6bf0e-3713-79e0-35c2-5b1b3adb4920" [ 559.647550] env[61273]: _type = "Task" [ 559.647550] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.655969] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52b6bf0e-3713-79e0-35c2-5b1b3adb4920, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.797270] env[61273]: INFO nova.scheduler.client.report [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Deleted allocations for instance 40709d2b-cc33-4ac0-9a13-731442d7edff [ 559.804769] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Expecting reply to msg 2b6c20a93cdf47bcb556eeab7e501b88 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 559.826433] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375247, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076741} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.828041] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 559.828041] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6029a10c-375d-4f12-82db-ed1bf72cab36 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.831410] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b6c20a93cdf47bcb556eeab7e501b88 [ 559.850371] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca/62c3b24d-bee7-4dd2-a6c7-9303c7c28cca.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 559.850981] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55a6ce29-5cb1-4851-94d6-b497eda10205 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.873565] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 559.873565] env[61273]: value = "task-375248" [ 559.873565] env[61273]: _type = "Task" [ 559.873565] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.889192] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375248, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.902137] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.989296] env[61273]: DEBUG nova.compute.utils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 559.990027] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 5f5f5816dab944f8b83aaa292c36fda5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 559.993233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 0bd60cefc9b14185b7123c36bf188dfe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 559.994027] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 559.994194] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 560.006594] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bd60cefc9b14185b7123c36bf188dfe [ 560.009063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f5f5816dab944f8b83aaa292c36fda5 [ 560.126941] env[61273]: DEBUG nova.policy [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8efc2fd425c047a1bbfa9e05794f23b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81bbad196c3b40d6b138d3366da6248b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 560.160299] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52b6bf0e-3713-79e0-35c2-5b1b3adb4920, 'name': SearchDatastore_Task, 'duration_secs': 0.00947} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.160623] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.160860] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 560.161096] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.161241] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.161414] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 560.161748] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7297f81-c123-4f94-9634-5002104aeb8b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.170191] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 560.170386] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 560.171206] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a21790b-0189-481a-89ab-d6314ed02f69 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.176678] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 560.176678] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52ded166-ef24-030e-00e9-fd6d25ee9a54" [ 560.176678] env[61273]: _type = "Task" [ 560.176678] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.187722] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52ded166-ef24-030e-00e9-fd6d25ee9a54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.310296] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d7699baf-a8e6-40a5-be5b-f44995055732 tempest-ServersAdminTestJSON-923997297 tempest-ServersAdminTestJSON-923997297-project-member] Lock "40709d2b-cc33-4ac0-9a13-731442d7edff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.374s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.310894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 50c11fe9d9f5429a9e2333b860414e00 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 560.322481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50c11fe9d9f5429a9e2333b860414e00 [ 560.389411] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375248, 'name': ReconfigVM_Task, 'duration_secs': 0.253947} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.389697] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca/62c3b24d-bee7-4dd2-a6c7-9303c7c28cca.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 560.390298] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-911bb162-137d-4745-bf92-98b0e7913ae4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.403257] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 560.403257] env[61273]: value = "task-375249" [ 560.403257] env[61273]: _type = "Task" [ 560.403257] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.425750] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375249, 'name': Rename_Task} progress is 10%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.507494] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 560.509447] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg d293023e2325455d81b4c82d504e2c44 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 560.552387] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 560.552638] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1784917b-8a7e-4974-b8b3-f8f2b3db019a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 560.552838] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 228821ca-e981-405b-8952-8a1718103d3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 560.553042] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 560.553238] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1d64d913-45f0-4768-8375-7863d9ae43c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 560.554180] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg eea7535b39774d4799c12a2e62c549a0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 560.567454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d293023e2325455d81b4c82d504e2c44 [ 560.604058] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eea7535b39774d4799c12a2e62c549a0 [ 560.697617] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52ded166-ef24-030e-00e9-fd6d25ee9a54, 'name': SearchDatastore_Task, 'duration_secs': 0.008047} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.698847] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7790424e-11f0-4ac8-b1f0-c1b7cd80a2a6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.709043] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 560.709043] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52a7700a-5d89-8be8-b2a3-6674858081b0" [ 560.709043] env[61273]: _type = "Task" [ 560.709043] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.721946] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52a7700a-5d89-8be8-b2a3-6674858081b0, 'name': SearchDatastore_Task, 'duration_secs': 0.008912} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.723133] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.723570] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 560.723969] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55fb9b68-f373-490e-9932-bd1af8e2921c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.732266] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 560.732266] env[61273]: value = "task-375250" [ 560.732266] env[61273]: _type = "Task" [ 560.732266] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.744114] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375250, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.812879] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 560.814940] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg ef22729f77a447e29c1127e021a6c211 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 560.861128] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef22729f77a447e29c1127e021a6c211 [ 560.914363] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375249, 'name': Rename_Task, 'duration_secs': 0.309332} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.914862] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 560.915241] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-166de4c5-f93e-470a-8c69-74d076f5c02c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.921183] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 560.921183] env[61273]: value = "task-375251" [ 560.921183] env[61273]: _type = "Task" [ 560.921183] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.934765] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375251, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.942983] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Successfully created port: d803f7f9-4edd-478d-8efa-d1db78feae38 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 561.014264] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 0f21cee571a84fad87e78fb87c6e18ff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 561.057738] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f21cee571a84fad87e78fb87c6e18ff [ 561.064937] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance f0c26eb3-e6d6-4d9f-9f07-5add9de6d126 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 561.064937] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 6f13e5c8dc7d4f79936820bc7ef3110e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 561.090251] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f13e5c8dc7d4f79936820bc7ef3110e [ 561.244031] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375250, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.337296] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.434314] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375251, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.518873] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 561.568032] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 561.568032] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 561.568032] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 561.569389] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 561.569389] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Image pref 0:0:0 
{{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 561.569389] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 561.569389] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 561.569389] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 561.569706] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 561.569706] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 561.569706] env[61273]: DEBUG nova.virt.hardware [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 561.569706] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 561.569706] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f5237a0b00804bd5bb879e6f6b8dd621 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 561.570168] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbff0329-eb79-46fa-b336-ca8373d2baf6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.595362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5237a0b00804bd5bb879e6f6b8dd621 [ 561.597595] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4aae3ec-d749-4e89-8d35-95ae791776a5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.745994] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.820751} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.746409] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 561.746677] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 561.746955] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66bf7d77-c663-4420-825a-e78b174b38dc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.754180] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 561.754180] env[61273]: value = "task-375252" [ 561.754180] env[61273]: _type = "Task" [ 561.754180] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.764260] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375252, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.931674] env[61273]: DEBUG oslo_vmware.api [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375251, 'name': PowerOnVM_Task, 'duration_secs': 0.559893} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.932012] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 561.932219] env[61273]: INFO nova.compute.manager [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Took 6.87 seconds to spawn the instance on the hypervisor. [ 561.932396] env[61273]: DEBUG nova.compute.manager [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 561.933215] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228e9c4c-41a8-4fe3-9843-bc192805f6b1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.942009] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg c30964ef8d034eb9a3faee6f8313aadf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 562.019178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c30964ef8d034eb9a3faee6f8313aadf [ 562.071275] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 3f1f549f-8034-4685-b6f0-db5a7a2a4a32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 562.071903] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 8319bbae6c994aff94945e317492b7a8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 562.082991] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8319bbae6c994aff94945e317492b7a8 [ 562.264217] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375252, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117361} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.264497] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 562.265297] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b965613-928b-4512-a9b0-140f8488a75e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.286992] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 562.287311] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be292bf2-866e-428a-a6fa-8c498c2cc106 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.308359] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 562.308359] env[61273]: value = "task-375253" [ 562.308359] env[61273]: _type = "Task" [ 562.308359] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.320700] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375253, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.454801] env[61273]: INFO nova.compute.manager [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Took 23.37 seconds to build instance. [ 562.455832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 9fb0b72893f54ec39a4e4b97de514d39 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 562.473082] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fb0b72893f54ec39a4e4b97de514d39 [ 562.575027] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 36c3ac75-5bfd-4a89-9ddb-28fded8da39c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 562.575468] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 4c344998e6084cfa8d47e61c3f27cb9d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 562.593065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c344998e6084cfa8d47e61c3f27cb9d [ 562.818505] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.958745] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4b20b822-c6f9-46a9-9852-aa9f838979ee tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "62c3b24d-bee7-4dd2-a6c7-9303c7c28cca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.044s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.958745] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 08507517c1e0464e9e686b01b6df5047 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 562.972174] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08507517c1e0464e9e686b01b6df5047 [ 563.078754] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 5cddeea1-7558-4c12-afdc-2ea7a706881a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 563.079335] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 0a9bd42a4fc140d5923e327ea42139f5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 563.099206] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a9bd42a4fc140d5923e327ea42139f5 [ 563.320168] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375253, 'name': ReconfigVM_Task, 'duration_secs': 0.613712} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.320168] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2/4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 563.321171] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-548a06b0-55b7-4313-8cdb-b01a2d1ab6c4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.327953] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 563.327953] env[61273]: value = "task-375254" [ 563.327953] env[61273]: _type = "Task" [ 563.327953] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.336464] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375254, 'name': Rename_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.461507] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 563.463402] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 86505b0469ea4512bb06afb7c968deb1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 563.503332] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86505b0469ea4512bb06afb7c968deb1 [ 563.504689] env[61273]: ERROR nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d803f7f9-4edd-478d-8efa-d1db78feae38, please check neutron logs for more information. 
[ 563.504689] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 563.504689] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.504689] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 563.504689] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.504689] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 563.504689] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.504689] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 563.504689] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.504689] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 563.504689] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.504689] env[61273]: ERROR nova.compute.manager raise self.value [ 563.504689] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.504689] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 563.504689] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.504689] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 563.505315] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.505315] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 563.505315] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d803f7f9-4edd-478d-8efa-d1db78feae38, please check neutron logs for more information. 
[ 563.505315] env[61273]: ERROR nova.compute.manager [ 563.505315] env[61273]: Traceback (most recent call last): [ 563.505315] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 563.505315] env[61273]: listener.cb(fileno) [ 563.505315] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.505315] env[61273]: result = function(*args, **kwargs) [ 563.505315] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 563.505315] env[61273]: return func(*args, **kwargs) [ 563.505315] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.505315] env[61273]: raise e [ 563.505315] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.505315] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 563.505315] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.505315] env[61273]: created_port_ids = self._update_ports_for_instance( [ 563.505315] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.505315] env[61273]: with excutils.save_and_reraise_exception(): [ 563.505315] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.505315] env[61273]: self.force_reraise() [ 563.505315] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.505315] env[61273]: raise self.value [ 563.505315] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.505315] env[61273]: updated_port = self._update_port( [ 563.505315] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.505315] env[61273]: _ensure_no_port_binding_failure(port) [ 563.505315] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.505315] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 563.506321] env[61273]: nova.exception.PortBindingFailed: Binding failed for port d803f7f9-4edd-478d-8efa-d1db78feae38, please check neutron logs for more information. [ 563.506321] env[61273]: Removing descriptor: 15 [ 563.506321] env[61273]: ERROR nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d803f7f9-4edd-478d-8efa-d1db78feae38, please check neutron logs for more information. 
[ 563.506321] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Traceback (most recent call last): [ 563.506321] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 563.506321] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] yield resources [ 563.506321] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 563.506321] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self.driver.spawn(context, instance, image_meta, [ 563.506321] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 563.506321] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 563.506321] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 563.506321] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] vm_ref = self.build_virtual_machine(instance, [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] vif_infos = vmwarevif.get_vif_info(self._session, [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] for vif in network_info: [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] return self._sync_wrapper(fn, *args, **kwargs) [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self.wait() [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self[:] = self._gt.wait() [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] return self._exit_event.wait() [ 563.506793] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 563.507208] env[61273]: ERROR 
nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] result = hub.switch() [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] return self.greenlet.switch() [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] result = function(*args, **kwargs) [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] return func(*args, **kwargs) [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] raise e [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] nwinfo = self.network_api.allocate_for_instance( [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 563.507208] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] created_port_ids = self._update_ports_for_instance( [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] with excutils.save_and_reraise_exception(): [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self.force_reraise() [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] raise self.value [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] updated_port = self._update_port( [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 563.508500] 
env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] _ensure_no_port_binding_failure(port) [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 563.508500] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] raise exception.PortBindingFailed(port_id=port['id']) [ 563.509269] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] nova.exception.PortBindingFailed: Binding failed for port d803f7f9-4edd-478d-8efa-d1db78feae38, please check neutron logs for more information. [ 563.509269] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] [ 563.509269] env[61273]: INFO nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Terminating instance [ 563.509269] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Acquiring lock "refresh_cache-1d64d913-45f0-4768-8375-7863d9ae43c3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.509269] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Acquired lock "refresh_cache-1d64d913-45f0-4768-8375-7863d9ae43c3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.509269] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 563.509452] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg c68afe4a7af44a9696131a1106f1f5e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 563.517688] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c68afe4a7af44a9696131a1106f1f5e6 [ 563.584976] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 7f8b08d4-3535-48ab-ba3f-a159511e2a64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 563.585550] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 502bb847633d4d22ab542db9b45b661a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 563.601028] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 502bb847633d4d22ab542db9b45b661a [ 563.677557] env[61273]: DEBUG nova.compute.manager [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Received event network-changed-d803f7f9-4edd-478d-8efa-d1db78feae38 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 563.677797] env[61273]: DEBUG nova.compute.manager [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Refreshing instance network info cache due to event network-changed-d803f7f9-4edd-478d-8efa-d1db78feae38. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 563.678133] env[61273]: DEBUG oslo_concurrency.lockutils [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] Acquiring lock "refresh_cache-1d64d913-45f0-4768-8375-7863d9ae43c3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.819479] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Acquiring lock "30ed4438-4f74-4bc3-a6cc-a59420751940" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.819840] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Lock "30ed4438-4f74-4bc3-a6cc-a59420751940" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.838562] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375254, 'name': Rename_Task, 'duration_secs': 0.436561} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.838854] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 563.839095] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee776982-9dab-4ef4-bf05-36f5ae719872 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.845673] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Waiting for the task: (returnval){ [ 563.845673] env[61273]: value = "task-375255" [ 563.845673] env[61273]: _type = "Task" [ 563.845673] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.855199] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375255, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.990746] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.047430] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.088110] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 7d1f7566-8e5e-476c-9d19-49ed7b16c308 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 564.089075] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 18eb201f16fc40cca7cd498f14420941 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 564.102706] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18eb201f16fc40cca7cd498f14420941 [ 564.166991] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.167566] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg f8b2094173d44b4795d5bbf3c9d78b44 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 564.185292] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8b2094173d44b4795d5bbf3c9d78b44 [ 564.357719] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375255, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.381723] env[61273]: DEBUG nova.compute.manager [None req-8f4ac284-7e87-4fa9-84e8-1df69841ae1d tempest-ServerDiagnosticsV248Test-273727717 tempest-ServerDiagnosticsV248Test-273727717-project-admin] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 564.381723] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7441f83-72bd-41cf-9ca6-0831ae3a6823 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.391927] env[61273]: INFO nova.compute.manager [None req-8f4ac284-7e87-4fa9-84e8-1df69841ae1d tempest-ServerDiagnosticsV248Test-273727717 tempest-ServerDiagnosticsV248Test-273727717-project-admin] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Retrieving diagnostics [ 564.392780] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1657c2ac-edc7-4350-b38a-8c8789cf0a21 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.591629] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 564.592272] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 406dbfa1acad46b7b5080ada7204d637 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 564.603846] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 406dbfa1acad46b7b5080ada7204d637 [ 564.670419] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Releasing lock "refresh_cache-1d64d913-45f0-4768-8375-7863d9ae43c3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.670904] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 564.671136] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 564.672392] env[61273]: DEBUG oslo_concurrency.lockutils [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] Acquired lock "refresh_cache-1d64d913-45f0-4768-8375-7863d9ae43c3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.672574] env[61273]: DEBUG nova.network.neutron [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Refreshing network info cache for port d803f7f9-4edd-478d-8efa-d1db78feae38 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 564.673036] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] Expecting reply to msg 5ae45b98b36646f989f6871521e973da in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 564.674355] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d3ec66d-8314-497d-aeaf-b411771d06f5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.682048] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ae45b98b36646f989f6871521e973da [ 564.685823] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c70f20-c6ed-4ef6-8d82-4ac827938b82 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.709686] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Instance does not exist on backend: 
nova.exception.InstanceNotFound: Instance 1d64d913-45f0-4768-8375-7863d9ae43c3 could not be found. [ 564.710134] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 564.710383] env[61273]: INFO nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 564.710801] env[61273]: DEBUG oslo.service.loopingcall [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 564.711022] env[61273]: DEBUG nova.compute.manager [-] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 564.711161] env[61273]: DEBUG nova.network.neutron [-] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 564.742976] env[61273]: DEBUG nova.network.neutron [-] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 564.743722] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b2dd7ef1de8646d48a6242060db7d134 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 564.750635] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2dd7ef1de8646d48a6242060db7d134 [ 564.857921] env[61273]: DEBUG oslo_vmware.api [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Task: {'id': task-375255, 'name': PowerOnVM_Task, 'duration_secs': 0.821416} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.858198] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 564.858398] env[61273]: DEBUG nova.compute.manager [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 564.859149] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999a0865-038f-4129-b683-90239aaad48a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.866788] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Expecting reply to msg bd7bc0b49eac48a7a345064f5b5e6541 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 564.943361] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd7bc0b49eac48a7a345064f5b5e6541 [ 565.095542] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1804f229-97b9-4ee3-933d-715431a900f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 565.096164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg b8673ffcf45e482db0fec6a1fb2052a8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 565.108035] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8673ffcf45e482db0fec6a1fb2052a8 [ 565.224877] env[61273]: DEBUG nova.network.neutron [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 565.249288] env[61273]: DEBUG nova.network.neutron [-] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.249805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6e3e6f145d6243a59bf2719dfe04352a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 565.258292] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e3e6f145d6243a59bf2719dfe04352a [ 565.381752] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.386384] env[61273]: DEBUG nova.network.neutron [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.386942] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] Expecting reply to msg e44a96dc4bd444b4af457a599a64ab3c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 565.405095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e44a96dc4bd444b4af457a599a64ab3c [ 565.600238] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 017b1da4-7c9b-477d-92a3-29b2248317d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 565.600238] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f0280354698f403abc28e176507306ef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 565.614962] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0280354698f403abc28e176507306ef [ 565.752077] env[61273]: INFO nova.compute.manager [-] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Took 1.04 seconds to deallocate network for instance. 
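The repeated lockutils lines above ("Acquiring lock \"compute_resources\" by ... :: waited 0.000s", "... \"released\" ... :: held N.NNNs") come from oslo.concurrency serializing the resource tracker's claim handling. Below is a minimal sketch of how a synchronized section produces that acquire/wait/held pattern; the function name and body are illustrative only, not Nova's actual implementation.

```python
# Minimal sketch (not Nova's actual code): how the "Acquiring lock ... / Lock ... acquired
# ... waited 0.000s / Lock ... released ... held N.NNNs" lines above are emitted by
# oslo.concurrency when a critical section is wrapped with lockutils.synchronized.
from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"  # same lock name as in the log


@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def abort_instance_claim(instance_uuid):
    # Everything here runs with the "compute_resources" lock held, so concurrent
    # claim / abort / update_usage calls in the same process are serialized; the time
    # spent waiting for and holding the lock is what the DEBUG lines above report.
    print(f"aborting claim for {instance_uuid}")


abort_instance_claim("1d64d913-45f0-4768-8375-7863d9ae43c3")
```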
[ 565.755053] env[61273]: DEBUG nova.compute.claims [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 565.755225] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.889580] env[61273]: DEBUG oslo_concurrency.lockutils [req-fb0e7d34-ce04-429f-be35-d57369d62082 req-29aa4fec-3085-4867-9e3f-4842e64eaf3c service nova] Releasing lock "refresh_cache-1d64d913-45f0-4768-8375-7863d9ae43c3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.102850] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 32b57d1b-d35f-488e-be23-9119f2f56562 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 566.103401] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 00b72e2b66c54bf68186698aab02f9ac in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 566.114349] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00b72e2b66c54bf68186698aab02f9ac [ 566.126945] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Acquiring lock "767d7956-954b-4be7-8cc6-45872ff4cfce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.127246] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Lock "767d7956-954b-4be7-8cc6-45872ff4cfce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.378306] env[61273]: DEBUG nova.compute.manager [req-17f43546-c0d9-48cb-b71e-8f9c4902da26 req-c5e0d51d-15c8-4326-9166-fb8a390cd41f service nova] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Received event network-vif-deleted-d803f7f9-4edd-478d-8efa-d1db78feae38 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 566.606122] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance d4d3db12-8de6-4daf-a087-89bb043d1217 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance 
has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 566.606727] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg ced2351b97f54bcc9e04e2e72c8a02d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 566.621724] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ced2351b97f54bcc9e04e2e72c8a02d2 [ 566.730222] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquiring lock "109fc11e-d640-4617-99a3-0defe0a5aa6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.730476] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "109fc11e-d640-4617-99a3-0defe0a5aa6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.823938] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 5e8e5cd1966c4dde827c44c6ed4aed43 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 566.832805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e8e5cd1966c4dde827c44c6ed4aed43 [ 567.109687] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 13c1d417-4087-46ad-b513-fc3317995d18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 567.110307] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 6b99261e255340918a90c24823fc6386 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 567.125501] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b99261e255340918a90c24823fc6386 [ 567.327893] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.327893] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.327893] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.327893] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.328071] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.330221] env[61273]: INFO nova.compute.manager [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Terminating instance [ 567.331352] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "refresh_cache-4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.331720] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 
tempest-ServersAdmin275Test-2057105880-project-member] Acquired lock "refresh_cache-4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.332269] env[61273]: DEBUG nova.network.neutron [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 567.335919] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 27e127478d0b418a8ac8425c283c1d7e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 567.344484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27e127478d0b418a8ac8425c283c1d7e [ 567.613768] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 782dc85a-56f4-4f03-8711-b78bbadb33ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 567.613768] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg e9b82c211a834075be72a4f2b7c64e07 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 567.626074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9b82c211a834075be72a4f2b7c64e07 [ 567.866857] env[61273]: DEBUG nova.network.neutron [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.006043] env[61273]: DEBUG nova.network.neutron [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.006043] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg f656ced3be4c4d3990257f6bddc940ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 568.014473] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f656ced3be4c4d3990257f6bddc940ce [ 568.116420] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1336becb-9691-490c-86ea-3bc70d13d7df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 568.116696] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 568.116821] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 568.495182] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c6fab6-169d-45c1-ae4a-84880853f324 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.504889] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3069fe29-ed34-4206-b3ae-c563ae04c3be {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.510558] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Releasing lock "refresh_cache-4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.510642] env[61273]: DEBUG nova.compute.manager [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 568.510938] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 568.511198] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Acquiring lock "b41c6d21-5e7f-427f-95ce-830fe0da8bc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.511399] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Lock "b41c6d21-5e7f-427f-95ce-830fe0da8bc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.513212] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b7c03a-0b2c-45e7-a9b0-6cce48bc1d9e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.543516] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ed5169-bbf3-4be5-b7ff-1c38ac660262 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.549477] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 568.550105] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e2173f5-a810-4b22-9b3c-8070a08017b6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.555132] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5112163-5e25-4766-b225-9db28748255f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.560128] env[61273]: DEBUG oslo_vmware.api [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 568.560128] env[61273]: value = "task-375256" [ 568.560128] env[61273]: _type = "Task" [ 568.560128] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.572158] env[61273]: DEBUG nova.compute.provider_tree [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.572674] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 6edc8e71613f4f66b8aa7a067a96b516 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 568.578513] env[61273]: DEBUG oslo_vmware.api [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375256, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.581276] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6edc8e71613f4f66b8aa7a067a96b516 [ 569.073195] env[61273]: DEBUG oslo_vmware.api [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375256, 'name': PowerOffVM_Task, 'duration_secs': 0.117764} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.073474] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 569.073641] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 569.073916] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80186f5c-2dff-4329-bb65-38ca5ae49157 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.078557] env[61273]: DEBUG nova.scheduler.client.report [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 569.081373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 64b0bd4e4a924dd8b9e532c984a43cc9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 569.097842] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64b0bd4e4a924dd8b9e532c984a43cc9 [ 569.108451] env[61273]: DEBUG 
nova.virt.vmwareapi.vmops [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 569.108675] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Deleting contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 569.108855] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleting the datastore file [datastore2] 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2 {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 569.109141] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d80ffbfb-be3f-4940-bcd2-c23ca9559796 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.115354] env[61273]: DEBUG oslo_vmware.api [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for the task: (returnval){ [ 569.115354] env[61273]: value = "task-375258" [ 569.115354] env[61273]: _type = "Task" [ 569.115354] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.126422] env[61273]: DEBUG oslo_vmware.api [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375258, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.585418] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61273) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 569.585671] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.101s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.585977] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.652s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.587840] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 00e68e8604fc4aa8816dc8d1d3bf375a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 569.589027] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.589393] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Getting list of instances from cluster (obj){ [ 569.589393] env[61273]: value = "domain-c8" [ 569.589393] env[61273]: _type = "ClusterComputeResource" [ 569.589393] env[61273]: } {{(pid=61273) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 569.590425] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8297df-52f3-48b2-96a9-68f159fc0cc1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.601195] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Got total of 1 instances {{(pid=61273) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 569.601997] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 6587f7d4ced64e5f9d3f1cd5a1f9d147 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 569.620827] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6587f7d4ced64e5f9d3f1cd5a1f9d147 [ 569.625080] env[61273]: DEBUG oslo_vmware.api [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Task: {'id': task-375258, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160108} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.625338] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 569.625519] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Deleted contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 569.625697] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 569.625866] env[61273]: INFO nova.compute.manager [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 569.626119] env[61273]: DEBUG oslo.service.loopingcall [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 569.626324] env[61273]: DEBUG nova.compute.manager [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 569.626420] env[61273]: DEBUG nova.network.neutron [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 569.632132] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00e68e8604fc4aa8816dc8d1d3bf375a [ 569.651672] env[61273]: DEBUG nova.network.neutron [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.652689] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bc66de15d99345fb82471844435df7c9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 569.668516] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc66de15d99345fb82471844435df7c9 [ 570.155716] env[61273]: DEBUG nova.network.neutron [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.155716] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 334bfbf93b2d497eb08ae4575542ecdf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 570.172673] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 334bfbf93b2d497eb08ae4575542ecdf [ 570.483961] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68aa4d0-2990-4483-b083-0985f6dfcbb6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.491806] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31fd008-e463-48fa-8d45-26d29aefb428 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.521969] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678c4642-18dd-432a-ab6c-9ad6cfaecb57 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.530046] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b4d69a-0e35-4f07-90c2-1f937b4ee4af {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.545942] env[61273]: DEBUG nova.compute.provider_tree [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.546497] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 989e5f38a97b427a912fc7811cb46c9f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 570.555017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 989e5f38a97b427a912fc7811cb46c9f [ 570.663951] env[61273]: INFO nova.compute.manager [-] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Took 1.04 seconds to deallocate network for instance. 
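The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above show the vmwareapi layer issuing vCenter calls and polling the resulting tasks through oslo.vmware (the "_poll_task ... progress is 0%" and "completed successfully" lines). The sketch below illustrates that invoke-then-wait pattern under stated assumptions: the helper name, host and credentials are placeholders, and this is not the driver's actual code.

```python
# Minimal sketch of the invoke-then-poll pattern behind the
# "Invoking VirtualMachine.PowerOffVM_Task ..." and "Task: {'id': task-..., ...}
# progress is 0% / completed successfully" lines above, using oslo.vmware directly.
from oslo_vmware import api


def power_off_and_unregister(session, vm_ref):
    """Power off a VM and unregister it, blocking on each vCenter task."""
    # invoke_api() issues the SOAP call through the session's vim client; the returned
    # task is then polled until vCenter reports success (or an error is raised), which
    # is what the wait_for_task / _poll_task log lines above correspond to.
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)
    # UnregisterVM is a synchronous call (no task object), matching the single
    # "Invoking VirtualMachine.UnregisterVM" line with no follow-up polling in the log.
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)


# A session would be created roughly like this (connects to vCenter on construction;
# host and credentials here are placeholders):
# session = api.VMwareAPISession("vc.example.test", "user", "secret",
#                                api_retry_count=10, task_poll_interval=0.5)
```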
[ 570.668823] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 679ba7b2deba4fc9a82d36667bd6f5a1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 570.721295] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 679ba7b2deba4fc9a82d36667bd6f5a1 [ 571.049713] env[61273]: DEBUG nova.scheduler.client.report [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.054556] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg f7ea482c132b415c91764adbc6e2ff73 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 571.069903] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7ea482c132b415c91764adbc6e2ff73 [ 571.171492] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.557094] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.971s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.557877] env[61273]: ERROR nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 14197306-66ad-4077-914c-b79fd0a658db, please check neutron logs for more information. 
[ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Traceback (most recent call last): [ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self.driver.spawn(context, instance, image_meta, [ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] vm_ref = self.build_virtual_machine(instance, [ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] vif_infos = vmwarevif.get_vif_info(self._session, [ 571.557877] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] for vif in network_info: [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] return self._sync_wrapper(fn, *args, **kwargs) [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self.wait() [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self[:] = self._gt.wait() [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] return self._exit_event.wait() [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] result = hub.switch() [ 571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
571.558290] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] return self.greenlet.switch() [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] result = function(*args, **kwargs) [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] return func(*args, **kwargs) [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] raise e [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] nwinfo = self.network_api.allocate_for_instance( [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] created_port_ids = self._update_ports_for_instance( [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] with excutils.save_and_reraise_exception(): [ 571.558897] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] self.force_reraise() [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] raise self.value [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] updated_port = self._update_port( [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] _ensure_no_port_binding_failure(port) [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] raise exception.PortBindingFailed(port_id=port['id']) [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] nova.exception.PortBindingFailed: Binding failed for port 14197306-66ad-4077-914c-b79fd0a658db, please check neutron logs for more information. [ 571.559315] env[61273]: ERROR nova.compute.manager [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] [ 571.559687] env[61273]: DEBUG nova.compute.utils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Binding failed for port 14197306-66ad-4077-914c-b79fd0a658db, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 571.560203] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.526s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.561309] env[61273]: INFO nova.compute.claims [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.563155] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 834be423bcb2403a8ba6bfc718cb1119 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 571.564962] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Build of instance 1784917b-8a7e-4974-b8b3-f8f2b3db019a was re-scheduled: Binding failed for port 14197306-66ad-4077-914c-b79fd0a658db, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 571.575671] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 571.575996] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Acquiring lock "refresh_cache-1784917b-8a7e-4974-b8b3-f8f2b3db019a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.576201] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Acquired lock "refresh_cache-1784917b-8a7e-4974-b8b3-f8f2b3db019a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.576378] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 571.576875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 3c8e25c3bf844ca58c437ebb3834088b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 571.583703] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c8e25c3bf844ca58c437ebb3834088b [ 571.610981] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 834be423bcb2403a8ba6bfc718cb1119 [ 572.083686] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg bafa0195a48647c8b3a79d3429be1e09 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 572.089444] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bafa0195a48647c8b3a79d3429be1e09 [ 572.159048] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.402876] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.403388] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 2b876d2b47fd48df885a6847a77f01f1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 572.411302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b876d2b47fd48df885a6847a77f01f1 [ 572.906144] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Releasing lock "refresh_cache-1784917b-8a7e-4974-b8b3-f8f2b3db019a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.906389] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 572.906566] env[61273]: DEBUG nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 572.906731] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 572.999769] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.000384] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg c596074609c54aad839aeffb3697a4c6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 573.010507] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c596074609c54aad839aeffb3697a4c6 [ 573.034390] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd814d6c-ad1b-4f57-ad4b-46a1f1d959f6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.043173] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa59b19-4bf4-4a29-af8c-28e5dfc47a7d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.086017] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e6b6ab-7ac5-49e2-a134-3f93772aebff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.094564] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72d1ec8-c5d0-48cb-88e1-d389368e488e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.110155] env[61273]: DEBUG nova.compute.provider_tree [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.110669] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 1a74123b2f0c49cb99f0d149aa2549b9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 573.121920] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a74123b2f0c49cb99f0d149aa2549b9 [ 573.511204] env[61273]: DEBUG nova.network.neutron [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.511751] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 5821717243cb40cdb9380537ef778a9a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 573.523193] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5821717243cb40cdb9380537ef778a9a [ 573.613359] env[61273]: DEBUG nova.scheduler.client.report [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Inventory has not changed for provider 
4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 573.615861] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 2125c4b66dde439da8d21d1e5b6ff69e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 573.631800] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2125c4b66dde439da8d21d1e5b6ff69e [ 573.819671] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Acquiring lock "12c47e99-faf4-4083-a46f-4e33c451e980" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.819921] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Lock "12c47e99-faf4-4083-a46f-4e33c451e980" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.013969] env[61273]: INFO nova.compute.manager [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] [instance: 1784917b-8a7e-4974-b8b3-f8f2b3db019a] Took 1.11 seconds to deallocate network for instance. [ 574.015703] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 184a13962d0b4e2ea0626a7c58c0b6da in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 574.066206] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 184a13962d0b4e2ea0626a7c58c0b6da [ 574.122255] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.122784] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 574.124923] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg aca20b43825445158dd31ee767c866de in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 574.125877] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.523s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.127520] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg eee6430fdf0647ef8504cec7b98ad5af in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 574.178608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aca20b43825445158dd31ee767c866de [ 574.188699] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eee6430fdf0647ef8504cec7b98ad5af [ 574.521409] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg de8d9f90ff334c42b4422fbb19b4f37d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 574.565423] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de8d9f90ff334c42b4422fbb19b4f37d [ 574.631498] env[61273]: DEBUG nova.compute.utils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 574.631747] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 5f3a2e6814934864b549e0f7c9d04d83 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 574.632995] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 574.633161] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 574.642586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f3a2e6814934864b549e0f7c9d04d83 [ 574.708808] env[61273]: DEBUG nova.policy [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2eca29707860426b983cf69143b5d6b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '760605cb896a4cbdb566a044b1e14fdc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 575.056225] env[61273]: INFO nova.scheduler.client.report [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Deleted allocations for instance 1784917b-8a7e-4974-b8b3-f8f2b3db019a [ 575.063355] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Expecting reply to msg 9b9108a4d3d44e2d8455576b3cd0f3cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 575.083921] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b9108a4d3d44e2d8455576b3cd0f3cc [ 575.138227] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 575.139838] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg e01bb350d66e4308a4a0262632ed2835 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 575.156261] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eec557e-2783-4273-bda0-79c455896f99 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.164558] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b5697e-fcc8-4005-b005-71107f066c4f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.206324] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e01bb350d66e4308a4a0262632ed2835 [ 575.207405] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Successfully created port: bc737d73-0b13-4405-8b56-e8520c00a00f {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 575.212131] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcef379-f4ed-4ecf-b489-16ab247c34a2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.218571] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0dfef2d-b3b5-4daa-b81b-deeaef794034 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.232437] env[61273]: DEBUG nova.compute.provider_tree [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.232947] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 68995e6b3b544b2683f989b66d873d4a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 575.243034] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68995e6b3b544b2683f989b66d873d4a [ 575.515013] env[61273]: DEBUG nova.compute.manager [None req-3ea02469-83e8-4508-ada8-88c7f39678ec tempest-ServerDiagnosticsV248Test-273727717 tempest-ServerDiagnosticsV248Test-273727717-project-admin] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 575.516575] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b695dd-b721-45cc-86d5-e9add7d07bd9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.523842] env[61273]: INFO nova.compute.manager [None req-3ea02469-83e8-4508-ada8-88c7f39678ec 
tempest-ServerDiagnosticsV248Test-273727717 tempest-ServerDiagnosticsV248Test-273727717-project-admin] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Retrieving diagnostics [ 575.524631] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c34dbc-b46c-4661-a582-0dcba451d555 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.568544] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3eedc122-e763-4f29-b667-fc450f60f44b tempest-ServerRescueTestJSON-1950758181 tempest-ServerRescueTestJSON-1950758181-project-member] Lock "1784917b-8a7e-4974-b8b3-f8f2b3db019a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.973s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.569186] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 9e0c3a2b93ae45a7b2c4927ceae24eb1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 575.583750] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e0c3a2b93ae45a7b2c4927ceae24eb1 [ 575.605108] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Acquiring lock "5ea287cd-ba85-446d-85d0-5a050fe49f17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.605341] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Lock "5ea287cd-ba85-446d-85d0-5a050fe49f17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.645789] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 7de5ac81147e4e01a7bc450dfce267e5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 575.694099] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7de5ac81147e4e01a7bc450dfce267e5 [ 575.735936] env[61273]: DEBUG nova.scheduler.client.report [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 575.738184] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 3dc57c4e351a46c0a6b29115d2184e46 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 575.762630] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3dc57c4e351a46c0a6b29115d2184e46 [ 576.071404] env[61273]: DEBUG nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 576.073289] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg d45d9f926b4249349adcf9e9702ba804 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 576.105628] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d45d9f926b4249349adcf9e9702ba804 [ 576.148729] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 576.173625] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 576.173874] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 576.174019] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 576.174233] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 576.174394] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 576.174538] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 576.174740] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 576.174885] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 576.175041] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 576.175195] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 576.175359] env[61273]: DEBUG nova.virt.hardware [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 576.176579] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bea8e4-f9fc-435d-8b17-12404ac822da {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.185085] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d43480b-d3d5-427e-9f66-eacd8d48272c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.240473] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.114s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.241226] env[61273]: ERROR nova.compute.manager [None 
req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f8f7383d-3e1b-4747-82b6-44e40294ada9, please check neutron logs for more information. [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] Traceback (most recent call last): [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self.driver.spawn(context, instance, image_meta, [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] vm_ref = self.build_virtual_machine(instance, [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] vif_infos = vmwarevif.get_vif_info(self._session, [ 576.241226] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] for vif in network_info: [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] return self._sync_wrapper(fn, *args, **kwargs) [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self.wait() [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self[:] = self._gt.wait() [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] return self._exit_event.wait() [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] result = hub.switch() [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 576.241631] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] return self.greenlet.switch() [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] result = function(*args, **kwargs) [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] return func(*args, **kwargs) [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] raise e [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] nwinfo = self.network_api.allocate_for_instance( [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] created_port_ids = self._update_ports_for_instance( [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] with excutils.save_and_reraise_exception(): [ 576.241992] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] self.force_reraise() [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] raise self.value [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] updated_port = self._update_port( [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 
228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] _ensure_no_port_binding_failure(port) [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] raise exception.PortBindingFailed(port_id=port['id']) [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] nova.exception.PortBindingFailed: Binding failed for port f8f7383d-3e1b-4747-82b6-44e40294ada9, please check neutron logs for more information. [ 576.242348] env[61273]: ERROR nova.compute.manager [instance: 228821ca-e981-405b-8952-8a1718103d3c] [ 576.242784] env[61273]: DEBUG nova.compute.utils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Binding failed for port f8f7383d-3e1b-4747-82b6-44e40294ada9, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 576.246494] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.665s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.246494] env[61273]: INFO nova.compute.claims [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.248346] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg c643b7ff6927402f84b510f9a8e9ecd6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 576.249653] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Build of instance 228821ca-e981-405b-8952-8a1718103d3c was re-scheduled: Binding failed for port f8f7383d-3e1b-4747-82b6-44e40294ada9, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 576.250100] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 576.250323] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "refresh_cache-228821ca-e981-405b-8952-8a1718103d3c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.250468] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquired lock "refresh_cache-228821ca-e981-405b-8952-8a1718103d3c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.250696] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 576.251004] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg ff236ae933834c2cb13edafa9d900023 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 576.263041] env[61273]: ERROR nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bc737d73-0b13-4405-8b56-e8520c00a00f, please check neutron logs for more information. 
[ 576.263041] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 576.263041] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.263041] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 576.263041] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.263041] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 576.263041] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 576.263041] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 576.263041] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.263041] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 576.263041] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.263041] env[61273]: ERROR nova.compute.manager raise self.value [ 576.263041] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.263041] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 576.263041] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.263041] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 576.263568] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.263568] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 576.263568] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bc737d73-0b13-4405-8b56-e8520c00a00f, please check neutron logs for more information. 
[ 576.263568] env[61273]: ERROR nova.compute.manager [ 576.263568] env[61273]: Traceback (most recent call last): [ 576.263568] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 576.263568] env[61273]: listener.cb(fileno) [ 576.263568] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.263568] env[61273]: result = function(*args, **kwargs) [ 576.263568] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 576.263568] env[61273]: return func(*args, **kwargs) [ 576.263568] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.263568] env[61273]: raise e [ 576.263568] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.263568] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 576.263568] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.263568] env[61273]: created_port_ids = self._update_ports_for_instance( [ 576.263568] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 576.263568] env[61273]: with excutils.save_and_reraise_exception(): [ 576.263568] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.263568] env[61273]: self.force_reraise() [ 576.263568] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.263568] env[61273]: raise self.value [ 576.263568] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.263568] env[61273]: updated_port = self._update_port( [ 576.263568] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.263568] env[61273]: _ensure_no_port_binding_failure(port) [ 576.263568] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.263568] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 576.264471] env[61273]: nova.exception.PortBindingFailed: Binding failed for port bc737d73-0b13-4405-8b56-e8520c00a00f, please check neutron logs for more information. [ 576.264471] env[61273]: Removing descriptor: 15 [ 576.264471] env[61273]: ERROR nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bc737d73-0b13-4405-8b56-e8520c00a00f, please check neutron logs for more information. 
[ 576.264471] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Traceback (most recent call last): [ 576.264471] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 576.264471] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] yield resources [ 576.264471] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 576.264471] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self.driver.spawn(context, instance, image_meta, [ 576.264471] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 576.264471] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self._vmops.spawn(context, instance, image_meta, injected_files, [ 576.264471] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 576.264471] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] vm_ref = self.build_virtual_machine(instance, [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] vif_infos = vmwarevif.get_vif_info(self._session, [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] for vif in network_info: [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] return self._sync_wrapper(fn, *args, **kwargs) [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self.wait() [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self[:] = self._gt.wait() [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] return self._exit_event.wait() [ 576.264903] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 576.265342] env[61273]: ERROR 
nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] result = hub.switch() [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] return self.greenlet.switch() [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] result = function(*args, **kwargs) [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] return func(*args, **kwargs) [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] raise e [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] nwinfo = self.network_api.allocate_for_instance( [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.265342] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] created_port_ids = self._update_ports_for_instance( [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] with excutils.save_and_reraise_exception(): [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self.force_reraise() [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] raise self.value [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] updated_port = self._update_port( [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.265935] 
env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] _ensure_no_port_binding_failure(port) [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.265935] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] raise exception.PortBindingFailed(port_id=port['id']) [ 576.266367] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] nova.exception.PortBindingFailed: Binding failed for port bc737d73-0b13-4405-8b56-e8520c00a00f, please check neutron logs for more information. [ 576.266367] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] [ 576.266367] env[61273]: INFO nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Terminating instance [ 576.268962] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Acquiring lock "refresh_cache-f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.268962] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Acquired lock "refresh_cache-f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.268962] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 576.269383] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 2210839515e442d48e74a4d99651d383 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 576.270238] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff236ae933834c2cb13edafa9d900023 [ 576.279629] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2210839515e442d48e74a4d99651d383 [ 576.290867] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c643b7ff6927402f84b510f9a8e9ecd6 [ 576.596605] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.755615] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 
tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg bf20432d2c0e459ab74e191d68fc1d21 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 576.768510] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf20432d2c0e459ab74e191d68fc1d21 [ 576.785543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg db5365935a454878bec8e9cd620c07b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 576.786308] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.796559] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db5365935a454878bec8e9cd620c07b1 [ 576.820780] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.831082] env[61273]: DEBUG nova.compute.manager [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Received event network-changed-bc737d73-0b13-4405-8b56-e8520c00a00f {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 576.831266] env[61273]: DEBUG nova.compute.manager [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Refreshing instance network info cache due to event network-changed-bc737d73-0b13-4405-8b56-e8520c00a00f. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 576.831455] env[61273]: DEBUG oslo_concurrency.lockutils [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] Acquiring lock "refresh_cache-f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.882095] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.882741] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 3a452594d1994eb0beec2e4efba9cb0e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 576.893161] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a452594d1994eb0beec2e4efba9cb0e [ 576.971035] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.971581] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 3f2ca05c80df4d298cede27b99d46783 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 576.981547] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f2ca05c80df4d298cede27b99d46783 [ 577.289325] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquiring lock "62c3b24d-bee7-4dd2-a6c7-9303c7c28cca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.289576] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "62c3b24d-bee7-4dd2-a6c7-9303c7c28cca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.289798] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquiring lock "62c3b24d-bee7-4dd2-a6c7-9303c7c28cca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.289982] env[61273]: DEBUG oslo_concurrency.lockutils [None 
req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "62c3b24d-bee7-4dd2-a6c7-9303c7c28cca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.290145] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "62c3b24d-bee7-4dd2-a6c7-9303c7c28cca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.294399] env[61273]: INFO nova.compute.manager [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Terminating instance [ 577.296935] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquiring lock "refresh_cache-62c3b24d-bee7-4dd2-a6c7-9303c7c28cca" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.297089] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquired lock "refresh_cache-62c3b24d-bee7-4dd2-a6c7-9303c7c28cca" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.297584] env[61273]: DEBUG nova.network.neutron [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 577.297989] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg f1dcb627858c42e7a32cc1f40037522b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 577.305415] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1dcb627858c42e7a32cc1f40037522b [ 577.387168] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Releasing lock "refresh_cache-228821ca-e981-405b-8952-8a1718103d3c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.387168] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 577.387168] env[61273]: DEBUG nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 577.387168] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 577.430136] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.430736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 9be1b5b61fa14737a2fc96f28646ccc8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 577.450298] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9be1b5b61fa14737a2fc96f28646ccc8 [ 577.474474] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Releasing lock "refresh_cache-f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.474873] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 577.475056] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 577.476379] env[61273]: DEBUG oslo_concurrency.lockutils [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] Acquired lock "refresh_cache-f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.476379] env[61273]: DEBUG nova.network.neutron [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Refreshing network info cache for port bc737d73-0b13-4405-8b56-e8520c00a00f {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 577.480037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] Expecting reply to msg e75cee06ac0d4726866068c3c4193e69 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 577.480037] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c261cdaa-71c4-4ecc-932c-eaea42ea8e7a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.486723] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e75cee06ac0d4726866068c3c4193e69 [ 577.501241] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950d90ee-37a4-4f44-b880-6034c2d773d9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.523373] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f0c26eb3-e6d6-4d9f-9f07-5add9de6d126 could not be found. [ 577.523373] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 577.523373] env[61273]: INFO nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Took 0.05 seconds to destroy the instance on the hypervisor. [ 577.523373] env[61273]: DEBUG oslo.service.loopingcall [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 577.523670] env[61273]: DEBUG nova.compute.manager [-] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 577.523670] env[61273]: DEBUG nova.network.neutron [-] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 577.543657] env[61273]: DEBUG nova.network.neutron [-] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.544192] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e0cdc887a61c4b7b9ccdaa7869769107 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 577.553190] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0cdc887a61c4b7b9ccdaa7869769107 [ 577.782543] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1816d9d9-0160-493c-8f6d-abd7dc49a67d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.790673] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1be968a-f948-4320-9e87-dabc6c32e1d8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.826499] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88be61c9-ea8e-4cde-a739-a7ab05dbab37 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.836247] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ab1659-bd57-475b-aae0-dd2a0c046d37 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.843166] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Acquiring lock "297a5546-6159-462c-a436-032d94855c00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.843390] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Lock "297a5546-6159-462c-a436-032d94855c00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.852923] env[61273]: DEBUG nova.compute.provider_tree [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.853409] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg f6dd5aca414d4500b277e23a35f19a18 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 577.871405] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6dd5aca414d4500b277e23a35f19a18 [ 577.879719] env[61273]: DEBUG nova.network.neutron [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.933608] env[61273]: DEBUG nova.network.neutron [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.934155] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 7908a86a91994854ad2b6fbb4f60e265 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 577.943238] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7908a86a91994854ad2b6fbb4f60e265 [ 578.047359] env[61273]: DEBUG nova.network.neutron [-] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.047949] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d378515fac714d1d8f63f21e343445fa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 578.056302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d378515fac714d1d8f63f21e343445fa [ 578.060663] env[61273]: DEBUG nova.network.neutron [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.061205] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 0caba106673241a8a8367bf13aefa780 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 578.069572] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0caba106673241a8a8367bf13aefa780 [ 578.182187] env[61273]: DEBUG nova.network.neutron [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 578.262808] env[61273]: DEBUG nova.network.neutron [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.263310] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] Expecting reply to msg d5784f07d82b49f28b3646a9435797fe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 578.272570] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5784f07d82b49f28b3646a9435797fe [ 578.357819] env[61273]: DEBUG nova.scheduler.client.report [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 578.361243] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 5b1332813cd646a69cf1482ac7ce0cba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 578.375565] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b1332813cd646a69cf1482ac7ce0cba [ 578.438342] env[61273]: INFO nova.compute.manager [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 228821ca-e981-405b-8952-8a1718103d3c] Took 1.05 seconds to deallocate network for instance. [ 578.440204] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 184ca450fde3483dafa8c7cda426bd7c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 578.503549] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 184ca450fde3483dafa8c7cda426bd7c [ 578.552054] env[61273]: INFO nova.compute.manager [-] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Took 1.03 seconds to deallocate network for instance. 
[ 578.553808] env[61273]: DEBUG nova.compute.claims [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 578.553808] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.564236] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Releasing lock "refresh_cache-62c3b24d-bee7-4dd2-a6c7-9303c7c28cca" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.564236] env[61273]: DEBUG nova.compute.manager [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 578.564236] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 578.565066] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693adf1e-24f7-44e1-b859-ea8b7710315c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.573529] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 578.573892] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a887165d-f0fc-4690-ac44-44317a08e015 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.580780] env[61273]: DEBUG oslo_vmware.api [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 578.580780] env[61273]: value = "task-375259" [ 578.580780] env[61273]: _type = "Task" [ 578.580780] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.603621] env[61273]: DEBUG oslo_vmware.api [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375259, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.765756] env[61273]: DEBUG oslo_concurrency.lockutils [req-00800b20-3c28-436a-8948-d96aaa0a4f39 req-8b83168b-7aa6-4241-b822-dc43b40f7577 service nova] Releasing lock "refresh_cache-f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.864325] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.864325] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 578.865362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 71531d992e1f4fffbc3fa69f56534bdf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 578.866447] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.460s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.866649] env[61273]: DEBUG nova.objects.instance [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61273) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 578.868425] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg d716b6e6f4b54adb85f55f704eab69fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 578.915804] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d716b6e6f4b54adb85f55f704eab69fc [ 578.918498] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71531d992e1f4fffbc3fa69f56534bdf [ 578.945082] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 5a9e7352ea3c48078ac54301a4d8949d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 579.017371] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a9e7352ea3c48078ac54301a4d8949d [ 579.051378] env[61273]: DEBUG nova.compute.manager [req-823d8c1e-f7eb-4962-9ade-0eeeccd07b8d req-a7abd9c6-cf2a-40bb-b04a-3c75441f6b23 service 
nova] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Received event network-vif-deleted-bc737d73-0b13-4405-8b56-e8520c00a00f {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 579.096248] env[61273]: DEBUG oslo_vmware.api [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375259, 'name': PowerOffVM_Task, 'duration_secs': 0.134321} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.096248] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 579.096248] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 579.096248] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18961075-c8fd-4eb3-934b-8094617a081c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.120202] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 579.120503] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Deleting contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 579.120646] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Deleting the datastore file [datastore2] 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 579.121058] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-beec30f5-a909-40a0-8fbc-6f813484341f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.128429] env[61273]: DEBUG oslo_vmware.api [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for the task: (returnval){ [ 579.128429] env[61273]: value = "task-375261" [ 579.128429] env[61273]: _type = "Task" [ 579.128429] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.136928] env[61273]: DEBUG oslo_vmware.api [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375261, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.373018] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 3d28148048824d59b493137fc8cc0b16 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 579.376083] env[61273]: DEBUG nova.compute.utils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 579.378154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg f5b7cc6c56de4c2494ee3f2689e2ed61 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 579.378385] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 579.378582] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 579.381618] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d28148048824d59b493137fc8cc0b16 [ 579.391570] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5b7cc6c56de4c2494ee3f2689e2ed61 [ 579.431185] env[61273]: DEBUG nova.policy [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0986847129324981ba5df690cc76bcba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca33c1550e694c38a15f9be28d24b19c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 579.480115] env[61273]: INFO nova.scheduler.client.report [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Deleted allocations for instance 228821ca-e981-405b-8952-8a1718103d3c [ 579.497001] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fde7998-9712-420b-a423-b457db635f29 
tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg be843d1883e74cfcbaa241e10449b7f2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 579.510731] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be843d1883e74cfcbaa241e10449b7f2 [ 579.644102] env[61273]: DEBUG oslo_vmware.api [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Task: {'id': task-375261, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100461} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.644403] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 579.644596] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Deleted contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 579.644910] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 579.644960] env[61273]: INFO nova.compute.manager [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Took 1.08 seconds to destroy the instance on the hypervisor. [ 579.645186] env[61273]: DEBUG oslo.service.loopingcall [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 579.645377] env[61273]: DEBUG nova.compute.manager [-] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 579.645471] env[61273]: DEBUG nova.network.neutron [-] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 579.679888] env[61273]: DEBUG nova.network.neutron [-] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 579.680568] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8f760aaafe5646e3a1f70d1602cffdc8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 579.688370] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f760aaafe5646e3a1f70d1602cffdc8 [ 579.758412] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Successfully created port: a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 579.884228] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 579.884228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg f1f471be98d84dd4a66f87cf50c946f4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 579.884228] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.884228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bbba9f36-2feb-438a-a94b-e06b9012af64 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 39ddcd3519d243fbb1e67db027752930 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 579.884228] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.452s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.885681] env[61273]: INFO nova.compute.claims [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.888512] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg a6fabcc63dbc4c9295b123e7282b1e9d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 579.899600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39ddcd3519d243fbb1e67db027752930 [ 579.955454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1f471be98d84dd4a66f87cf50c946f4 [ 
579.971271] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6fabcc63dbc4c9295b123e7282b1e9d [ 580.006445] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fde7998-9712-420b-a423-b457db635f29 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "228821ca-e981-405b-8952-8a1718103d3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.389s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.007073] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg adf67c93db6e4b25aae9be6ed2e3dedb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 580.019320] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adf67c93db6e4b25aae9be6ed2e3dedb [ 580.183641] env[61273]: DEBUG nova.network.neutron [-] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.184208] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b49dc8c65543451dabaf3baf3bc17d21 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 580.193124] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b49dc8c65543451dabaf3baf3bc17d21 [ 580.391140] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 854838e3afd642308e5e7a5cd08c3455 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 580.397431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 832b545afe564e73bc7a2660118eba75 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 580.407067] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 832b545afe564e73bc7a2660118eba75 [ 580.450077] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 854838e3afd642308e5e7a5cd08c3455 [ 580.511398] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 580.513325] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg b8dbe2d35a97405886cdfd0fbf138632 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 580.569439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8dbe2d35a97405886cdfd0fbf138632 [ 580.690277] env[61273]: INFO nova.compute.manager [-] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Took 1.04 seconds to deallocate network for instance. 
[ 580.693401] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 567eef28b96646cc83812d3fbccff609 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 580.737843] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 567eef28b96646cc83812d3fbccff609 [ 580.894691] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 580.933619] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 580.933986] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 580.934265] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 580.934399] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 580.934557] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 580.934708] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 580.934930] env[61273]: DEBUG nova.virt.hardware [None 
req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 580.935087] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 580.935251] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 580.935422] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 580.935601] env[61273]: DEBUG nova.virt.hardware [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 580.936827] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df89cb7-58a6-4173-a5a1-6b0e3d61c3b4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.955085] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe2385b-3eba-49b0-94a5-eec015cf5d20 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.058766] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.196694] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.395670] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119efe40-96a2-43ba-87ea-164450497599 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.403640] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d3555a-bc46-4203-a4f2-30d474754101 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.438309] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1a4ec0-ce89-4015-8d1f-eb3394541255 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.445782] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c17b660-1c7a-44d9-9066-2e880831ac36 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.458704] env[61273]: DEBUG nova.compute.provider_tree [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.459203] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 9197a433db2e4106b227fce404f81094 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 581.473475] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9197a433db2e4106b227fce404f81094 [ 581.961574] env[61273]: DEBUG nova.scheduler.client.report [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 581.964051] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 230dad5a5d7b412788d81c01cf2d1fb4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 581.981819] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 230dad5a5d7b412788d81c01cf2d1fb4 [ 582.005898] env[61273]: DEBUG nova.compute.manager [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Received event network-changed-a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 582.006094] env[61273]: DEBUG nova.compute.manager [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Refreshing instance network info cache due to event network-changed-a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 582.006305] env[61273]: DEBUG oslo_concurrency.lockutils [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] Acquiring lock "refresh_cache-8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.006441] env[61273]: DEBUG oslo_concurrency.lockutils [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] Acquired lock "refresh_cache-8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.006590] env[61273]: DEBUG nova.network.neutron [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Refreshing network info cache for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 582.007037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] Expecting reply to msg 3c9d2fbedef24bb3987d7a7dcf4a0cd2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 582.021672] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c9d2fbedef24bb3987d7a7dcf4a0cd2 [ 582.144769] env[61273]: ERROR nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e, please check neutron logs for more information. 
[ 582.144769] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 582.144769] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 582.144769] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 582.144769] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 582.144769] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 582.144769] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 582.144769] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 582.144769] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.144769] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 582.144769] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.144769] env[61273]: ERROR nova.compute.manager raise self.value [ 582.144769] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 582.144769] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 582.144769] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.144769] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 582.145313] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 582.145313] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 582.145313] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e, please check neutron logs for more information. 
[ 582.145313] env[61273]: ERROR nova.compute.manager [ 582.145313] env[61273]: Traceback (most recent call last): [ 582.145313] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 582.145313] env[61273]: listener.cb(fileno) [ 582.145313] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 582.145313] env[61273]: result = function(*args, **kwargs) [ 582.145313] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 582.145313] env[61273]: return func(*args, **kwargs) [ 582.145313] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 582.145313] env[61273]: raise e [ 582.145313] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 582.145313] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 582.145313] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 582.145313] env[61273]: created_port_ids = self._update_ports_for_instance( [ 582.145313] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 582.145313] env[61273]: with excutils.save_and_reraise_exception(): [ 582.145313] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.145313] env[61273]: self.force_reraise() [ 582.145313] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.145313] env[61273]: raise self.value [ 582.145313] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 582.145313] env[61273]: updated_port = self._update_port( [ 582.145313] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.145313] env[61273]: _ensure_no_port_binding_failure(port) [ 582.145313] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 582.145313] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 582.146199] env[61273]: nova.exception.PortBindingFailed: Binding failed for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e, please check neutron logs for more information. [ 582.146199] env[61273]: Removing descriptor: 15 [ 582.146199] env[61273]: ERROR nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e, please check neutron logs for more information. 
[ 582.146199] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Traceback (most recent call last): [ 582.146199] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 582.146199] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] yield resources [ 582.146199] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 582.146199] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self.driver.spawn(context, instance, image_meta, [ 582.146199] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 582.146199] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self._vmops.spawn(context, instance, image_meta, injected_files, [ 582.146199] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 582.146199] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] vm_ref = self.build_virtual_machine(instance, [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] vif_infos = vmwarevif.get_vif_info(self._session, [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] for vif in network_info: [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] return self._sync_wrapper(fn, *args, **kwargs) [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self.wait() [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self[:] = self._gt.wait() [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] return self._exit_event.wait() [ 582.146663] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 582.147104] env[61273]: ERROR 
nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] result = hub.switch() [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] return self.greenlet.switch() [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] result = function(*args, **kwargs) [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] return func(*args, **kwargs) [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] raise e [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] nwinfo = self.network_api.allocate_for_instance( [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 582.147104] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] created_port_ids = self._update_ports_for_instance( [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] with excutils.save_and_reraise_exception(): [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self.force_reraise() [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] raise self.value [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] updated_port = self._update_port( [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.147531] 
env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] _ensure_no_port_binding_failure(port) [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 582.147531] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] raise exception.PortBindingFailed(port_id=port['id']) [ 582.147975] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] nova.exception.PortBindingFailed: Binding failed for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e, please check neutron logs for more information. [ 582.147975] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] [ 582.147975] env[61273]: INFO nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Terminating instance [ 582.148428] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "refresh_cache-8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.467668] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.468421] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 582.470701] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 6a9110991bfb45218676cec585b214a7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 582.471884] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.156s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.473387] env[61273]: INFO nova.compute.claims [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 582.475538] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 1aff2b8c4f044de9b8dfe91ca4f8487e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 582.518167] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a9110991bfb45218676cec585b214a7 [ 582.527706] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1aff2b8c4f044de9b8dfe91ca4f8487e [ 582.536599] env[61273]: DEBUG nova.network.neutron [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 582.667418] env[61273]: DEBUG nova.network.neutron [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.667418] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] Expecting reply to msg fbf868c77ee147599c5a7e91b72cb03d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 582.676925] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbf868c77ee147599c5a7e91b72cb03d [ 582.980089] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 36712278d96748dca716f513f65cbc7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 582.981821] env[61273]: DEBUG nova.compute.utils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.982387] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 16c6861454974277bf2b84343a159f7f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 582.983333] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 582.983499] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 583.000300] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16c6861454974277bf2b84343a159f7f [ 583.014760] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36712278d96748dca716f513f65cbc7a [ 583.036456] env[61273]: DEBUG nova.policy [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ceaca1b950d44ea7a0e720301e2f707c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2eeaa82f12ce4d39a8d6b32e7cbbcd9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 583.171276] env[61273]: DEBUG oslo_concurrency.lockutils [req-f6435b39-147f-417f-aa20-b92ab9885e49 req-aa2307a6-e5bf-4581-b463-0edf746f1741 service nova] Releasing lock "refresh_cache-8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.171276] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquired lock "refresh_cache-8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.171276] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 583.171276] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 951374eca603407387d0c6e438b8a25a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 583.177519] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 951374eca603407387d0c6e438b8a25a [ 583.487557] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 583.495190] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg b81af197e8ff4b03b4d827efa5bfff44 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 583.549782] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b81af197e8ff4b03b4d827efa5bfff44 [ 583.651199] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Successfully created port: 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.731193] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 583.968079] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.968605] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 72fce1db645c4fec911d9af5d77d1c7e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 583.978010] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72fce1db645c4fec911d9af5d77d1c7e [ 583.999586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 6d9f82c503df4e99807a11557d182a94 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 584.004517] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1005ecee-83d2-4ccd-9065-b59743000ecf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.018730] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e67554-51e5-4401-a02e-50d417aed5e3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.063585] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d9f82c503df4e99807a11557d182a94 [ 584.065130] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8596f9a6-c8e7-4d45-9da9-2daf75fa2a63 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.076264] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0f48bde5-0ebc-4ab9-9672-6cffed6d35ba {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.093755] env[61273]: DEBUG nova.compute.provider_tree [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.096279] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 088477f2e7414d88a4c37f8025b78364 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 584.117033] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 088477f2e7414d88a4c37f8025b78364 [ 584.173329] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "2b87dfbe-2b94-4787-a795-94f8b63f651c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.173573] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "2b87dfbe-2b94-4787-a795-94f8b63f651c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.474267] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Releasing lock "refresh_cache-8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.474267] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 584.474267] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 584.474267] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9ab47be-64a0-41ad-a1c3-b4f85a82ae04 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.484530] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2487f1-0d33-40fe-b4fe-9dfbfdf35951 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.500231] env[61273]: DEBUG nova.compute.manager [req-ebf1a7ed-2050-459d-aff3-ca866ae69a43 req-af5f1bed-bce2-445c-9ebc-ee9b69fd1483 service nova] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Received event network-vif-deleted-a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 584.506947] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 584.517287] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050 could not be found. [ 584.517699] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 584.518015] env[61273]: INFO nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Took 0.05 seconds to destroy the instance on the hypervisor. [ 584.518536] env[61273]: DEBUG oslo.service.loopingcall [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 584.519610] env[61273]: DEBUG nova.compute.manager [-] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 584.519852] env[61273]: DEBUG nova.network.neutron [-] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 584.535996] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 584.536538] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 584.536969] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 584.537291] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 584.537581] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 584.538134] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 584.538494] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 584.538804] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 584.539209] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 584.539658] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 584.540162] env[61273]: DEBUG nova.virt.hardware [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 584.541994] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbc6001-cb15-421e-b6d3-151da3bcec8c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.553756] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855f4aed-ae26-49e3-968f-cf8ec201a078 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.599271] env[61273]: DEBUG nova.scheduler.client.report [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 584.602401] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg fa234be7e4754e458759d3e2f24cb368 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 584.618666] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa234be7e4754e458759d3e2f24cb368 [ 584.620188] env[61273]: DEBUG nova.network.neutron [-] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.621112] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9fe94b12f8e748c7aa5e61d3e56e56b6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 584.635649] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fe94b12f8e748c7aa5e61d3e56e56b6 [ 585.104583] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.105031] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 585.107010] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 331458ddf8a6496bbfd34ec8ee5b0991 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 585.108057] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.206s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.109468] env[61273]: INFO nova.compute.claims [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.111160] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg be22d91fbaac4a2695046106747abd13 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 585.124259] env[61273]: ERROR nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac, please check neutron logs for more information. 
[ 585.124259] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 585.124259] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.124259] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 585.124259] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 585.124259] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 585.124259] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 585.124259] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 585.124259] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.124259] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 585.124259] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.124259] env[61273]: ERROR nova.compute.manager raise self.value [ 585.124259] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 585.124259] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 585.124259] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.124259] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 585.124896] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.124896] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 585.124896] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac, please check neutron logs for more information. 
[ 585.124896] env[61273]: ERROR nova.compute.manager [ 585.124896] env[61273]: Traceback (most recent call last): [ 585.124896] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 585.124896] env[61273]: listener.cb(fileno) [ 585.124896] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.124896] env[61273]: result = function(*args, **kwargs) [ 585.124896] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 585.124896] env[61273]: return func(*args, **kwargs) [ 585.124896] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 585.124896] env[61273]: raise e [ 585.124896] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.124896] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 585.124896] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 585.124896] env[61273]: created_port_ids = self._update_ports_for_instance( [ 585.124896] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 585.124896] env[61273]: with excutils.save_and_reraise_exception(): [ 585.124896] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.124896] env[61273]: self.force_reraise() [ 585.124896] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.124896] env[61273]: raise self.value [ 585.124896] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 585.124896] env[61273]: updated_port = self._update_port( [ 585.124896] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.124896] env[61273]: _ensure_no_port_binding_failure(port) [ 585.124896] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.124896] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 585.125928] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac, please check neutron logs for more information. [ 585.125928] env[61273]: Removing descriptor: 15 [ 585.125928] env[61273]: DEBUG nova.network.neutron [-] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.125928] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6648e39d475a44cf9ea3663f6d034601 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 585.125928] env[61273]: ERROR nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac, please check neutron logs for more information. 
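The same failure now repeats for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac on instance 3f1f549f-8034-4685-b6f0-db5a7a2a4a32. Where the log says "please check neutron logs for more information", the port's binding state can also be inspected from the API side; a minimal sketch with openstacksdk, assuming a configured cloud entry named 'devstack' (an assumption, not taken from this log), would look like:

    # Minimal follow-up sketch, assuming openstacksdk is installed and clouds.yaml
    # defines a cloud named 'devstack'.
    import openstack

    conn = openstack.connect(cloud='devstack')
    port = conn.network.get_port('2c9b8cd4-1c30-438e-a736-fd3acb6e41ac')
    # A failed binding (and an empty binding host) is what Nova's check reacts to;
    # the Neutron server/agent logs explain why the mechanism driver refused it.
    print(port.status, port.binding_vif_type, port.binding_host_id)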
[ 585.125928] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Traceback (most recent call last): [ 585.125928] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 585.125928] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] yield resources [ 585.125928] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 585.125928] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self.driver.spawn(context, instance, image_meta, [ 585.125928] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] vm_ref = self.build_virtual_machine(instance, [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] vif_infos = vmwarevif.get_vif_info(self._session, [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] for vif in network_info: [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] return self._sync_wrapper(fn, *args, **kwargs) [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self.wait() [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 585.126370] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self[:] = self._gt.wait() [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] return self._exit_event.wait() [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 585.126814] env[61273]: ERROR 
nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] result = hub.switch() [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] return self.greenlet.switch() [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] result = function(*args, **kwargs) [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] return func(*args, **kwargs) [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] raise e [ 585.126814] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] nwinfo = self.network_api.allocate_for_instance( [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] created_port_ids = self._update_ports_for_instance( [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] with excutils.save_and_reraise_exception(): [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self.force_reraise() [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] raise self.value [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] updated_port = self._update_port( [ 585.127204] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.127204] 
env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] _ensure_no_port_binding_failure(port) [ 585.127631] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.127631] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] raise exception.PortBindingFailed(port_id=port['id']) [ 585.127631] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] nova.exception.PortBindingFailed: Binding failed for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac, please check neutron logs for more information. [ 585.127631] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] [ 585.127631] env[61273]: INFO nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Terminating instance [ 585.129047] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Acquiring lock "refresh_cache-3f1f549f-8034-4685-b6f0-db5a7a2a4a32" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.129047] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Acquired lock "refresh_cache-3f1f549f-8034-4685-b6f0-db5a7a2a4a32" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.129047] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.129047] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 60116f347b1c4656a639eef4d6fa9d50 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 585.142780] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60116f347b1c4656a639eef4d6fa9d50 [ 585.158298] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6648e39d475a44cf9ea3663f6d034601 [ 585.175165] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 331458ddf8a6496bbfd34ec8ee5b0991 [ 585.175764] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be22d91fbaac4a2695046106747abd13 [ 585.614650] env[61273]: DEBUG nova.compute.utils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 585.615291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 
tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 26124626d853420681bc92f0dc9de0b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 585.617336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 15f83b7c2f6049008aa26ad82c55c109 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 585.620029] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 585.620029] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 585.625060] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15f83b7c2f6049008aa26ad82c55c109 [ 585.628795] env[61273]: INFO nova.compute.manager [-] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Took 1.11 seconds to deallocate network for instance. [ 585.630919] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26124626d853420681bc92f0dc9de0b4 [ 585.631496] env[61273]: DEBUG nova.compute.claims [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 585.631687] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.659573] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.669864] env[61273]: DEBUG nova.policy [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fa51619ae0443588ddcffcc0dd9c9e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8973425b51ed44779cd2caf61cddfae3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 585.885478] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.885478] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 12418261bf7042bba397ece0e7338152 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 585.901393] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12418261bf7042bba397ece0e7338152 [ 586.010918] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Successfully created port: a2439eab-fa0c-4c49-9830-697e9a092f05 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.119786] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 586.121606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 58fa08904c0c4b95b8c4c8b8cd9e7f75 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 586.190141] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58fa08904c0c4b95b8c4c8b8cd9e7f75 [ 586.387518] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Releasing lock "refresh_cache-3f1f549f-8034-4685-b6f0-db5a7a2a4a32" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.387948] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 586.388187] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 586.388509] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c06f11b5-6481-49d1-b033-dde4e0a189ed {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.399468] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3da4490-4a2d-4325-b3c2-21bff17f5306 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.423760] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3f1f549f-8034-4685-b6f0-db5a7a2a4a32 could not be found. [ 586.423836] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 586.423997] env[61273]: INFO nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 586.424261] env[61273]: DEBUG oslo.service.loopingcall [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.427059] env[61273]: DEBUG nova.compute.manager [-] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 586.427059] env[61273]: DEBUG nova.network.neutron [-] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 586.455423] env[61273]: DEBUG nova.network.neutron [-] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.455953] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d224033377a54be9b417e05c8b77e727 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 586.463223] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d224033377a54be9b417e05c8b77e727 [ 586.591646] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad4260f-dfc6-4911-a4a2-013c6bf83db7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.599074] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1c48a4-e135-481e-b124-5cc14e7ba9f6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.632063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg d98c62c926184152a60dfe1dbe246fdf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 586.633707] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b8a5d7-ccaf-4231-913a-aeb1fac81c2f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.644797] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b88f16-ddd1-4df6-bc3f-cec938ef751c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.654393] env[61273]: DEBUG nova.compute.provider_tree [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.654932] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 658cb9b5d170434aaa6d710b141b320d in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 586.671071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 658cb9b5d170434aaa6d710b141b320d [ 586.686701] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d98c62c926184152a60dfe1dbe246fdf [ 586.692068] env[61273]: DEBUG nova.compute.manager [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Received event network-changed-2c9b8cd4-1c30-438e-a736-fd3acb6e41ac {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 586.692256] env[61273]: DEBUG nova.compute.manager [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Refreshing instance network info cache due to event network-changed-2c9b8cd4-1c30-438e-a736-fd3acb6e41ac. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 586.692593] env[61273]: DEBUG oslo_concurrency.lockutils [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] Acquiring lock "refresh_cache-3f1f549f-8034-4685-b6f0-db5a7a2a4a32" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.692736] env[61273]: DEBUG oslo_concurrency.lockutils [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] Acquired lock "refresh_cache-3f1f549f-8034-4685-b6f0-db5a7a2a4a32" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.692898] env[61273]: DEBUG nova.network.neutron [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Refreshing network info cache for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 586.693344] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] Expecting reply to msg 827e486cc3294b9abb513bd269fd4d04 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 586.699554] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 827e486cc3294b9abb513bd269fd4d04 [ 586.737760] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Acquiring lock "e2560c8e-61c6-4343-82cb-47dc5b1997fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.737980] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Lock "e2560c8e-61c6-4343-82cb-47dc5b1997fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.958723] env[61273]: DEBUG nova.network.neutron [-] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Updating instance_info_cache with 
network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.959205] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e7e12d0e1a0c4cc8bf0379c62ca06a03 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 586.973070] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7e12d0e1a0c4cc8bf0379c62ca06a03 [ 587.138409] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 587.157742] env[61273]: DEBUG nova.scheduler.client.report [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 587.160675] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg bcb26bf0e2104c22b4bab5db86e46207 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 587.172275] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:33:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='330331584',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-863313287',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 587.172569] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 587.172752] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 
tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.172995] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 587.173152] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.173301] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 587.173565] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 587.173789] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 587.174155] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 587.174155] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 587.174804] env[61273]: DEBUG nova.virt.hardware [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 587.175999] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa05c56-135c-49e9-8a37-a795046a6530 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.179920] env[61273]: ERROR nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 
tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a2439eab-fa0c-4c49-9830-697e9a092f05, please check neutron logs for more information. [ 587.179920] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 587.179920] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.179920] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 587.179920] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.179920] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 587.179920] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.179920] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 587.179920] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.179920] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 587.179920] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.179920] env[61273]: ERROR nova.compute.manager raise self.value [ 587.179920] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.179920] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 587.179920] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.179920] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 587.180479] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.180479] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 587.180479] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a2439eab-fa0c-4c49-9830-697e9a092f05, please check neutron logs for more information. 
[ 587.180479] env[61273]: ERROR nova.compute.manager [ 587.180479] env[61273]: Traceback (most recent call last): [ 587.180479] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 587.180479] env[61273]: listener.cb(fileno) [ 587.180479] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.180479] env[61273]: result = function(*args, **kwargs) [ 587.180479] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 587.180479] env[61273]: return func(*args, **kwargs) [ 587.180479] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.180479] env[61273]: raise e [ 587.180479] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.180479] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 587.180479] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.180479] env[61273]: created_port_ids = self._update_ports_for_instance( [ 587.180479] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.180479] env[61273]: with excutils.save_and_reraise_exception(): [ 587.180479] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.180479] env[61273]: self.force_reraise() [ 587.180479] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.180479] env[61273]: raise self.value [ 587.180479] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.180479] env[61273]: updated_port = self._update_port( [ 587.180479] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.180479] env[61273]: _ensure_no_port_binding_failure(port) [ 587.180479] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.180479] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 587.218297] env[61273]: nova.exception.PortBindingFailed: Binding failed for port a2439eab-fa0c-4c49-9830-697e9a092f05, please check neutron logs for more information. [ 587.218297] env[61273]: Removing descriptor: 15 [ 587.218297] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcb26bf0e2104c22b4bab5db86e46207 [ 587.218297] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b75cf7a-88db-4395-a682-14a090e9eee9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.218297] env[61273]: ERROR nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a2439eab-fa0c-4c49-9830-697e9a092f05, please check neutron logs for more information. 
[ 587.218297] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Traceback (most recent call last): [ 587.218297] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 587.218297] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] yield resources [ 587.218297] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 587.218297] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self.driver.spawn(context, instance, image_meta, [ 587.218297] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] vm_ref = self.build_virtual_machine(instance, [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] vif_infos = vmwarevif.get_vif_info(self._session, [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] for vif in network_info: [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] return self._sync_wrapper(fn, *args, **kwargs) [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self.wait() [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 587.218738] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self[:] = self._gt.wait() [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] return self._exit_event.wait() [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 587.219061] env[61273]: ERROR 
nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] current.throw(*self._exc) [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] result = function(*args, **kwargs) [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] return func(*args, **kwargs) [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] raise e [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] nwinfo = self.network_api.allocate_for_instance( [ 587.219061] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] created_port_ids = self._update_ports_for_instance( [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] with excutils.save_and_reraise_exception(): [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self.force_reraise() [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] raise self.value [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] updated_port = self._update_port( [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] _ensure_no_port_binding_failure(port) [ 587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
587.219443] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] raise exception.PortBindingFailed(port_id=port['id']) [ 587.219799] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] nova.exception.PortBindingFailed: Binding failed for port a2439eab-fa0c-4c49-9830-697e9a092f05, please check neutron logs for more information. [ 587.219799] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] [ 587.219799] env[61273]: INFO nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Terminating instance [ 587.230325] env[61273]: DEBUG nova.network.neutron [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.237292] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Acquiring lock "refresh_cache-36c3ac75-5bfd-4a89-9ddb-28fded8da39c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.237292] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Acquired lock "refresh_cache-36c3ac75-5bfd-4a89-9ddb-28fded8da39c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.237387] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.240744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 3055a5ece6db400891e1116ffb699950 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 587.258778] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3055a5ece6db400891e1116ffb699950 [ 587.424862] env[61273]: DEBUG nova.network.neutron [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.424862] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] Expecting reply to msg b0d9891c386c4a0fb1bfd4e89a250394 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 587.444437] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0d9891c386c4a0fb1bfd4e89a250394 [ 587.468752] env[61273]: INFO 
nova.compute.manager [-] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Took 1.04 seconds to deallocate network for instance. [ 587.470159] env[61273]: DEBUG nova.compute.claims [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 587.470159] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.668426] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.557s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.668426] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 587.668426] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg f806f488b32b4beb825af2ffc6da824c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 587.668820] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.332s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.670315] env[61273]: INFO nova.compute.claims [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.676023] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg a3cfd3b9236342f49cc898097bf01368 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 587.758436] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f806f488b32b4beb825af2ffc6da824c [ 587.771143] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3cfd3b9236342f49cc898097bf01368 [ 587.786078] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 
tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.927284] env[61273]: DEBUG oslo_concurrency.lockutils [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] Releasing lock "refresh_cache-3f1f549f-8034-4685-b6f0-db5a7a2a4a32" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.927605] env[61273]: DEBUG nova.compute.manager [req-4dc64ba0-d5da-4438-8953-0e5240f93bd3 req-aea199eb-35ff-4431-bdd8-d6a472d4a6b5 service nova] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Received event network-vif-deleted-2c9b8cd4-1c30-438e-a736-fd3acb6e41ac {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 587.933832] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.934369] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg ce88df5df0a4489983f5960596ef6326 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 587.943438] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce88df5df0a4489983f5960596ef6326 [ 588.176517] env[61273]: DEBUG nova.compute.utils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 588.177161] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg cc5ad49c3f464316983a5e1003796fc5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 588.179210] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 3fe18d13d8e24e258063556079bad181 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 588.183955] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 588.183955] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 588.189827] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc5ad49c3f464316983a5e1003796fc5 [ 588.190830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fe18d13d8e24e258063556079bad181 [ 588.251056] env[61273]: DEBUG nova.policy [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bb003baa0764f52a66a6a7253669732', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c9ed91d497142ddb16a51df48b5c996', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 588.436907] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Releasing lock "refresh_cache-36c3ac75-5bfd-4a89-9ddb-28fded8da39c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.437357] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 588.437554] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 588.437873] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b28b2d5f-1269-4dbf-9261-f74844e3248b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.448617] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c925702b-c715-40c0-a715-16e01242bdaf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.480605] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 36c3ac75-5bfd-4a89-9ddb-28fded8da39c could not be found. [ 588.480740] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 588.480914] env[61273]: INFO nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 588.481150] env[61273]: DEBUG oslo.service.loopingcall [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.481368] env[61273]: DEBUG nova.compute.manager [-] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.481457] env[61273]: DEBUG nova.network.neutron [-] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 588.501868] env[61273]: DEBUG nova.network.neutron [-] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.503439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9c0aaaaf650d4fafb26c473580351f29 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 588.510527] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c0aaaaf650d4fafb26c473580351f29 [ 588.681381] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 588.683730] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 3ce101e372a74b04845abed557537ff3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 588.722680] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ce101e372a74b04845abed557537ff3 [ 588.801673] env[61273]: DEBUG nova.compute.manager [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Received event network-changed-a2439eab-fa0c-4c49-9830-697e9a092f05 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 588.801886] env[61273]: DEBUG nova.compute.manager [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Refreshing instance network info cache due to event network-changed-a2439eab-fa0c-4c49-9830-697e9a092f05. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 588.802107] env[61273]: DEBUG oslo_concurrency.lockutils [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] Acquiring lock "refresh_cache-36c3ac75-5bfd-4a89-9ddb-28fded8da39c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.802249] env[61273]: DEBUG oslo_concurrency.lockutils [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] Acquired lock "refresh_cache-36c3ac75-5bfd-4a89-9ddb-28fded8da39c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.802403] env[61273]: DEBUG nova.network.neutron [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Refreshing network info cache for port a2439eab-fa0c-4c49-9830-697e9a092f05 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 588.804418] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] Expecting reply to msg d94d01f23a5a4106b5881f6779586249 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 588.813083] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d94d01f23a5a4106b5881f6779586249 [ 588.863475] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Successfully created port: 319ef93a-07c4-43fa-885a-2600407961b4 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.005241] env[61273]: DEBUG nova.network.neutron [-] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.007019] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f1be528337124aa683e4ae2d5fdcb57b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 589.017840] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1be528337124aa683e4ae2d5fdcb57b [ 589.132970] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a72b73-4b5c-4aa4-a355-5da44312154d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.145962] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9428b6da-e9e7-42cf-8632-4ac58dbe185c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.182181] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314c88d6-3724-4d84-b82e-22eceb52607f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.189708] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2cbc5fb-9122-49bf-bc36-7061ddf73400 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 589.207281] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 016cbe036204400ba73cf91dd1c66548 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 589.216901] env[61273]: DEBUG nova.compute.provider_tree [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.216901] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 95673a73e1f04893bc5e79023c861f58 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 589.224802] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95673a73e1f04893bc5e79023c861f58 [ 589.245115] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 016cbe036204400ba73cf91dd1c66548 [ 589.333606] env[61273]: DEBUG nova.network.neutron [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.492379] env[61273]: DEBUG nova.network.neutron [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.492975] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] Expecting reply to msg cd205f720b7b4bffa8c83ed4b12eea20 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 589.503194] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd205f720b7b4bffa8c83ed4b12eea20 [ 589.508486] env[61273]: INFO nova.compute.manager [-] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Took 1.03 seconds to deallocate network for instance. 
[ 589.511107] env[61273]: DEBUG nova.compute.claims [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 589.511343] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.711858] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 589.725349] env[61273]: DEBUG nova.scheduler.client.report [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 589.727809] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg dc335a42f637404f98592a5d5b08bdc5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 589.742476] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 589.742714] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 
tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 589.742865] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.743042] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 589.743183] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.743325] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 589.743526] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 589.743682] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 589.743845] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 589.744071] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 589.744294] env[61273]: DEBUG nova.virt.hardware [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 589.745141] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8a3785-93a3-4fe4-b985-f9ce36e86008 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.748740] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc335a42f637404f98592a5d5b08bdc5 [ 589.755123] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b76f644-9ebc-41e5-a43a-426e6c8c002b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.968694] env[61273]: ERROR nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 319ef93a-07c4-43fa-885a-2600407961b4, please check neutron logs for more information. [ 589.968694] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 589.968694] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 589.968694] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 589.968694] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 589.968694] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 589.968694] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 589.968694] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 589.968694] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 589.968694] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 589.968694] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 589.968694] env[61273]: ERROR nova.compute.manager raise self.value [ 589.968694] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 589.968694] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 589.968694] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 589.968694] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 589.969218] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 589.969218] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 589.969218] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 319ef93a-07c4-43fa-885a-2600407961b4, please check neutron logs for more information. 
[ 589.969218] env[61273]: ERROR nova.compute.manager [ 589.969218] env[61273]: Traceback (most recent call last): [ 589.969218] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 589.969218] env[61273]: listener.cb(fileno) [ 589.969218] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 589.969218] env[61273]: result = function(*args, **kwargs) [ 589.969218] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 589.969218] env[61273]: return func(*args, **kwargs) [ 589.969218] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 589.969218] env[61273]: raise e [ 589.969218] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 589.969218] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 589.969218] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 589.969218] env[61273]: created_port_ids = self._update_ports_for_instance( [ 589.969218] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 589.969218] env[61273]: with excutils.save_and_reraise_exception(): [ 589.969218] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 589.969218] env[61273]: self.force_reraise() [ 589.969218] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 589.969218] env[61273]: raise self.value [ 589.969218] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 589.969218] env[61273]: updated_port = self._update_port( [ 589.969218] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 589.969218] env[61273]: _ensure_no_port_binding_failure(port) [ 589.969218] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 589.969218] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 589.969974] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 319ef93a-07c4-43fa-885a-2600407961b4, please check neutron logs for more information. [ 589.969974] env[61273]: Removing descriptor: 15 [ 589.969974] env[61273]: ERROR nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 319ef93a-07c4-43fa-885a-2600407961b4, please check neutron logs for more information. 
[ 589.969974] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Traceback (most recent call last): [ 589.969974] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 589.969974] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] yield resources [ 589.969974] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 589.969974] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self.driver.spawn(context, instance, image_meta, [ 589.969974] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 589.969974] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 589.969974] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 589.969974] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] vm_ref = self.build_virtual_machine(instance, [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] vif_infos = vmwarevif.get_vif_info(self._session, [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] for vif in network_info: [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] return self._sync_wrapper(fn, *args, **kwargs) [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self.wait() [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self[:] = self._gt.wait() [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] return self._exit_event.wait() [ 589.970246] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 589.970528] env[61273]: ERROR 
nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] result = hub.switch() [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] return self.greenlet.switch() [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] result = function(*args, **kwargs) [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] return func(*args, **kwargs) [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] raise e [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] nwinfo = self.network_api.allocate_for_instance( [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 589.970528] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] created_port_ids = self._update_ports_for_instance( [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] with excutils.save_and_reraise_exception(): [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self.force_reraise() [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] raise self.value [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] updated_port = self._update_port( [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 589.970812] 
env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] _ensure_no_port_binding_failure(port) [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 589.970812] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] raise exception.PortBindingFailed(port_id=port['id']) [ 589.971106] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] nova.exception.PortBindingFailed: Binding failed for port 319ef93a-07c4-43fa-885a-2600407961b4, please check neutron logs for more information. [ 589.971106] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] [ 589.971106] env[61273]: INFO nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Terminating instance [ 589.972664] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "refresh_cache-5cddeea1-7558-4c12-afdc-2ea7a706881a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.972735] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquired lock "refresh_cache-5cddeea1-7558-4c12-afdc-2ea7a706881a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.972871] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 589.973294] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 485063091d0e40ec8911008757a8b38d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 589.980990] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 485063091d0e40ec8911008757a8b38d [ 590.003549] env[61273]: DEBUG oslo_concurrency.lockutils [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] Releasing lock "refresh_cache-36c3ac75-5bfd-4a89-9ddb-28fded8da39c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.003805] env[61273]: DEBUG nova.compute.manager [req-9ab26bd2-e9a3-4867-bd92-2047acfec9db req-51a9d04d-99f1-488d-9d57-80692be44375 service nova] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Received event network-vif-deleted-a2439eab-fa0c-4c49-9830-697e9a092f05 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 590.230755] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 
tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.231252] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 590.233037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 679dd85ad42d4ee395d0d187b7878a08 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 590.234067] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.244s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.235464] env[61273]: INFO nova.compute.claims [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.237088] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 77b16441088542f7ab5115cf6d001c67 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 590.279991] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77b16441088542f7ab5115cf6d001c67 [ 590.282270] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 679dd85ad42d4ee395d0d187b7878a08 [ 590.497595] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 590.576056] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.576595] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 27a54cb36bdb4acda8de9048c6969dc2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 590.587472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27a54cb36bdb4acda8de9048c6969dc2 [ 590.739710] env[61273]: DEBUG nova.compute.utils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.740381] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 014fe662f13e44bc82ff858f89b625c3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 590.742445] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 5d3aaf7e6ec24bdb9cc5426a05c7034a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 590.743425] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 590.743591] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 590.751065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 014fe662f13e44bc82ff858f89b625c3 [ 590.751572] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d3aaf7e6ec24bdb9cc5426a05c7034a [ 590.785832] env[61273]: DEBUG nova.policy [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bb003baa0764f52a66a6a7253669732', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6c9ed91d497142ddb16a51df48b5c996', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 590.855696] env[61273]: DEBUG nova.compute.manager [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Received event network-changed-319ef93a-07c4-43fa-885a-2600407961b4 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 590.855919] env[61273]: DEBUG nova.compute.manager [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Refreshing instance network info cache due to event network-changed-319ef93a-07c4-43fa-885a-2600407961b4. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 590.856148] env[61273]: DEBUG oslo_concurrency.lockutils [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] Acquiring lock "refresh_cache-5cddeea1-7558-4c12-afdc-2ea7a706881a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.087072] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Releasing lock "refresh_cache-5cddeea1-7558-4c12-afdc-2ea7a706881a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.087072] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 591.087072] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 591.087072] env[61273]: DEBUG oslo_concurrency.lockutils [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] Acquired lock "refresh_cache-5cddeea1-7558-4c12-afdc-2ea7a706881a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.087072] env[61273]: DEBUG nova.network.neutron [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Refreshing network info cache for port 319ef93a-07c4-43fa-885a-2600407961b4 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 591.087553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] Expecting reply to msg 0d096e1e1a504078ad7e6076ee75ff9f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 591.087553] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd73868f-7533-4622-981b-718541c1d1fc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.091017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d096e1e1a504078ad7e6076ee75ff9f [ 591.095632] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f1c705-cd3a-47f1-b6eb-e34489eb0808 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.108485] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Successfully created port: 6c735bdb-9615-424c-b35d-b618b55a0ca8 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.124606] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5cddeea1-7558-4c12-afdc-2ea7a706881a could not be found. [ 591.124833] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 591.125007] env[61273]: INFO nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Took 0.04 seconds to destroy the instance on the hypervisor. 
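The destroy sequence above is the fallout of the PortBindingFailed raised for port 319ef93a-07c4-43fa-885a-2600407961b4: the build is aborted, the instance is terminated, and its (empty) network info is deallocated. Below is a minimal, self-contained sketch of the check the tracebacks end in (nova/network/neutron.py, _ensure_no_port_binding_failure); the constant and exception class are re-declared locally so the snippet runs on its own, and the sample port dict is illustrative only.

VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron reports an unbindable port via binding:vif_type; Nova turns that
    # into PortBindingFailed, which is what unwinds the spawn seen above.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure({
        'id': '319ef93a-07c4-43fa-885a-2600407961b4',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED,
    })
except PortBindingFailed as exc:
    print(exc)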
[ 591.125245] env[61273]: DEBUG oslo.service.loopingcall [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 591.125455] env[61273]: DEBUG nova.compute.manager [-] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 591.125546] env[61273]: DEBUG nova.network.neutron [-] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 591.144060] env[61273]: DEBUG nova.network.neutron [-] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 591.144553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 52d90cb4b9ed47b881ec9416fe249d22 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 591.151769] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52d90cb4b9ed47b881ec9416fe249d22 [ 591.244451] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 591.246288] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 0f33c4a34f9948a1a49f5581a91ca7a3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 591.300837] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f33c4a34f9948a1a49f5581a91ca7a3 [ 591.607452] env[61273]: DEBUG nova.network.neutron [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 591.646308] env[61273]: DEBUG nova.network.neutron [-] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.646839] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c08a55c0e36647ee9102b59d81584c03 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 591.654799] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c08a55c0e36647ee9102b59d81584c03 [ 591.691381] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620d62c1-c8c6-4121-a1c0-f16ea9b7b40a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.702050] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b31dddc-a7b4-43cd-b6e0-56f48938225e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.740586] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d10e45-7419-4011-8ee9-38d02a629f53 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.744239] env[61273]: DEBUG nova.network.neutron [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.744995] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] Expecting reply to msg c9799a643c3042509925cebfec0634be in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 591.755598] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9799a643c3042509925cebfec0634be [ 591.758319] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg b4bbca5d63a647e79403c8c0992f8a56 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 591.770963] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35f33ad-9485-4be3-a375-e59a18d63c1b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.791875] env[61273]: DEBUG nova.compute.provider_tree [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.791875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg f28b9d0d3fb34bbdad0e4d3ee5c57776 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 591.797119] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4bbca5d63a647e79403c8c0992f8a56 [ 591.802190] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f28b9d0d3fb34bbdad0e4d3ee5c57776 [ 592.149119] env[61273]: INFO nova.compute.manager [-] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Took 1.02 seconds to deallocate network for instance. [ 592.151932] env[61273]: DEBUG nova.compute.claims [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 592.152150] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.248878] env[61273]: ERROR nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6c735bdb-9615-424c-b35d-b618b55a0ca8, please check neutron logs for more information. [ 592.248878] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 592.248878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.248878] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 592.248878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 592.248878] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 592.248878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 592.248878] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 592.248878] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.248878] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 592.248878] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.248878] env[61273]: ERROR nova.compute.manager raise self.value [ 592.248878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 592.248878] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 592.248878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.248878] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 592.250002] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.250002] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 
592.250002] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6c735bdb-9615-424c-b35d-b618b55a0ca8, please check neutron logs for more information. [ 592.250002] env[61273]: ERROR nova.compute.manager [ 592.250002] env[61273]: Traceback (most recent call last): [ 592.250002] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 592.250002] env[61273]: listener.cb(fileno) [ 592.250002] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.250002] env[61273]: result = function(*args, **kwargs) [ 592.250002] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 592.250002] env[61273]: return func(*args, **kwargs) [ 592.250002] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.250002] env[61273]: raise e [ 592.250002] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.250002] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 592.250002] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 592.250002] env[61273]: created_port_ids = self._update_ports_for_instance( [ 592.250002] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 592.250002] env[61273]: with excutils.save_and_reraise_exception(): [ 592.250002] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.250002] env[61273]: self.force_reraise() [ 592.250002] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.250002] env[61273]: raise self.value [ 592.250002] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 592.250002] env[61273]: updated_port = self._update_port( [ 592.250002] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.250002] env[61273]: _ensure_no_port_binding_failure(port) [ 592.250002] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.250002] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 592.251252] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 6c735bdb-9615-424c-b35d-b618b55a0ca8, please check neutron logs for more information. 
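The scheduler report client logs the full inventory for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb several times in this stretch, each time noting it has not changed. As a worked example of what those numbers allow, Placement's effective capacity per resource class is (total - reserved) * allocation_ratio; the figures below are copied from the logged inventory.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%s: %g allocatable' % (rc, capacity))

# Prints VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400. The min_unit, max_unit
# and step_size fields also present in the log further constrain how a single
# allocation may be shaped.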
[ 592.251252] env[61273]: Removing descriptor: 15 [ 592.255820] env[61273]: DEBUG oslo_concurrency.lockutils [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] Releasing lock "refresh_cache-5cddeea1-7558-4c12-afdc-2ea7a706881a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.256134] env[61273]: DEBUG nova.compute.manager [req-cbc909ca-0a2c-4bc0-936f-66053a94b8c6 req-73632fb0-b1cb-40ba-9c4f-c7823f4becf1 service nova] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Received event network-vif-deleted-319ef93a-07c4-43fa-885a-2600407961b4 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 592.271747] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 592.294069] env[61273]: DEBUG nova.scheduler.client.report [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 592.297458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 6c5fc9fc570d41589f7160cf9c1b39fe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 592.310224] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.310516] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Flavor limits 0:0:0 {{(pid=61273) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.310735] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.310951] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.311125] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.311361] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.311519] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.311781] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.311978] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.312205] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.312416] env[61273]: DEBUG nova.virt.hardware [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.313474] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbc5c16-fba5-4324-aa8d-db8da7eeea5b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.316761] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c5fc9fc570d41589f7160cf9c1b39fe [ 592.325457] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bbfe21-5e0e-4311-a0c8-2d5520c0d80d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.347524] env[61273]: ERROR nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6c735bdb-9615-424c-b35d-b618b55a0ca8, please check neutron logs for more information. [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Traceback (most recent call last): [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] yield resources [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self.driver.spawn(context, instance, image_meta, [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self._vmops.spawn(context, instance, image_meta, injected_files, [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] vm_ref = self.build_virtual_machine(instance, [ 592.347524] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] vif_infos = vmwarevif.get_vif_info(self._session, [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] for vif in network_info: [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] return self._sync_wrapper(fn, *args, **kwargs) [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self.wait() [ 592.347787] env[61273]: ERROR nova.compute.manager 
[instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self[:] = self._gt.wait() [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] return self._exit_event.wait() [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 592.347787] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] current.throw(*self._exc) [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] result = function(*args, **kwargs) [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] return func(*args, **kwargs) [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] raise e [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] nwinfo = self.network_api.allocate_for_instance( [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] created_port_ids = self._update_ports_for_instance( [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] with excutils.save_and_reraise_exception(): [ 592.348154] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self.force_reraise() [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] raise self.value [ 592.348428] env[61273]: ERROR 
nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] updated_port = self._update_port( [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] _ensure_no_port_binding_failure(port) [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] raise exception.PortBindingFailed(port_id=port['id']) [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] nova.exception.PortBindingFailed: Binding failed for port 6c735bdb-9615-424c-b35d-b618b55a0ca8, please check neutron logs for more information. [ 592.348428] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] [ 592.348428] env[61273]: INFO nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Terminating instance [ 592.350070] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "refresh_cache-7f8b08d4-3535-48ab-ba3f-a159511e2a64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.350231] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquired lock "refresh_cache-7f8b08d4-3535-48ab-ba3f-a159511e2a64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.350417] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.350851] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 2864dc80fd36471aa1e084e89f8a7365 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 592.358022] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2864dc80fd36471aa1e084e89f8a7365 [ 592.802046] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=61273) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.803226] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 592.804275] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg cb535392d9f94669ac016b07aa4b6a75 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 592.805277] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.424s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.805460] env[61273]: DEBUG nova.objects.instance [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61273) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 592.806818] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Expecting reply to msg c3c3c986ff2a4fea938f72d7c6d4675f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 592.837873] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb535392d9f94669ac016b07aa4b6a75 [ 592.848971] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3c3c986ff2a4fea938f72d7c6d4675f [ 592.887867] env[61273]: DEBUG nova.compute.manager [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Received event network-changed-6c735bdb-9615-424c-b35d-b618b55a0ca8 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 592.888067] env[61273]: DEBUG nova.compute.manager [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Refreshing instance network info cache due to event network-changed-6c735bdb-9615-424c-b35d-b618b55a0ca8. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 592.888251] env[61273]: DEBUG oslo_concurrency.lockutils [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] Acquiring lock "refresh_cache-7f8b08d4-3535-48ab-ba3f-a159511e2a64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.889089] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.997892] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.998416] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg ee36328c16cd4a8caa208754c3bad20d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 593.007155] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee36328c16cd4a8caa208754c3bad20d [ 593.309411] env[61273]: DEBUG nova.compute.utils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 593.310185] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg decd89c7d1684f838a32cd2df1e0c516 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 593.315245] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Expecting reply to msg 8f928692e4d34c8cbb0f4fcb0a9cfa4d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 593.316479] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 593.318042] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 593.322125] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg decd89c7d1684f838a32cd2df1e0c516 [ 593.322765] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f928692e4d34c8cbb0f4fcb0a9cfa4d [ 593.323936] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.519s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.324645] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccd4ba1d-9601-49d3-9327-4721d6411aba tempest-ServersAdmin275Test-1388887324 tempest-ServersAdmin275Test-1388887324-project-admin] Expecting reply to msg 7970bdd945e54721bf9c8db25f435704 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 593.326006] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.570s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.328318] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg b814ec0d9d3f4403af975645fedd5172 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 593.343459] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7970bdd945e54721bf9c8db25f435704 [ 593.373295] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b814ec0d9d3f4403af975645fedd5172 [ 593.389557] env[61273]: DEBUG nova.policy [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77dead37e5de41e99014b84af587c7db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3de9d2e84f5f44b785cadcfe5adcd3da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 593.500404] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Releasing lock "refresh_cache-7f8b08d4-3535-48ab-ba3f-a159511e2a64" {{(pid=61273) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 593.500842] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 593.501048] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 593.501423] env[61273]: DEBUG oslo_concurrency.lockutils [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] Acquired lock "refresh_cache-7f8b08d4-3535-48ab-ba3f-a159511e2a64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.501635] env[61273]: DEBUG nova.network.neutron [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Refreshing network info cache for port 6c735bdb-9615-424c-b35d-b618b55a0ca8 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 593.502085] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] Expecting reply to msg 4a7401fdc83c470ea12ecb9e1c20b656 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 593.502890] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e57490e-d368-462b-a7a6-b8a60b81da98 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.511386] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a7401fdc83c470ea12ecb9e1c20b656 [ 593.514333] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b03373a-27ce-4bb8-8c6e-50348a6cec43 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.541980] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7f8b08d4-3535-48ab-ba3f-a159511e2a64 could not be found. [ 593.542223] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 593.542683] env[61273]: INFO nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 593.543001] env[61273]: DEBUG oslo.service.loopingcall [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.543232] env[61273]: DEBUG nova.compute.manager [-] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 593.543319] env[61273]: DEBUG nova.network.neutron [-] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 593.574897] env[61273]: DEBUG nova.network.neutron [-] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.575491] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 224a4b549bf0421bacfe0b60032299b7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 593.585277] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 224a4b549bf0421bacfe0b60032299b7 [ 593.817572] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 593.819600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 4acd839d998e42b190e598e8cfc4cb35 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 593.859978] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Successfully created port: 8061659c-a94d-4f7d-a527-0f760ebc0807 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.868628] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4acd839d998e42b190e598e8cfc4cb35 [ 594.030829] env[61273]: DEBUG nova.network.neutron [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.079197] env[61273]: DEBUG nova.network.neutron [-] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.079684] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b9352c4bfb12410eb70c3d0c1b5cc6d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 594.094754] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9352c4bfb12410eb70c3d0c1b5cc6d3 [ 594.116742] env[61273]: DEBUG nova.network.neutron [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.117481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] Expecting reply to msg 599276f598ca40c6b5ef861e5787003c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 594.126697] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 599276f598ca40c6b5ef861e5787003c [ 594.323812] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg b1c65a96282c49dfa656e1542e397d10 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 594.328872] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b0a47e-70a3-4625-aeb9-e23551f3eaf3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.338198] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a817b0d4-8f09-417f-8021-186fd186b03d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.373421] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1c65a96282c49dfa656e1542e397d10 [ 594.374801] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d061d1e7-371a-4483-96cb-a61e7bd06566 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.386993] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9cfcdb-79c7-44c7-8891-2ff9ae9e2c11 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.403595] env[61273]: DEBUG nova.compute.provider_tree [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.404527] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting 
reply to msg da14e6b34d1b4fa1a294fba36cfe469a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 594.412373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da14e6b34d1b4fa1a294fba36cfe469a [ 594.581786] env[61273]: INFO nova.compute.manager [-] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Took 1.04 seconds to deallocate network for instance. [ 594.584177] env[61273]: DEBUG nova.compute.claims [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 594.584362] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.620200] env[61273]: DEBUG oslo_concurrency.lockutils [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] Releasing lock "refresh_cache-7f8b08d4-3535-48ab-ba3f-a159511e2a64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.620478] env[61273]: DEBUG nova.compute.manager [req-3f283d9e-dfe3-4252-a35a-0f222db56caa req-839d5ea2-70d2-49b4-a0f3-8aab2e8cdb28 service nova] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Received event network-vif-deleted-6c735bdb-9615-424c-b35d-b618b55a0ca8 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 594.833629] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 594.859651] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 594.859935] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 594.860138] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.860328] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 594.860467] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 594.860614] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 594.860857] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 594.860976] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 594.861139] env[61273]: DEBUG nova.virt.hardware [None 
req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 594.861297] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 594.861470] env[61273]: DEBUG nova.virt.hardware [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 594.862355] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df81a956-2974-44d8-b37e-79aacf99e747 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.872253] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e18a91d-bfae-41a1-99c0-5b0773c4907d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.913149] env[61273]: DEBUG nova.scheduler.client.report [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 594.916324] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 04c0d7bf417b4e26b48e36149723e603 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 594.934189] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04c0d7bf417b4e26b48e36149723e603 [ 594.988115] env[61273]: DEBUG nova.compute.manager [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Received event network-changed-8061659c-a94d-4f7d-a527-0f760ebc0807 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 594.988115] env[61273]: DEBUG nova.compute.manager [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Refreshing instance network info cache due to event network-changed-8061659c-a94d-4f7d-a527-0f760ebc0807. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 594.988115] env[61273]: DEBUG oslo_concurrency.lockutils [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] Acquiring lock "refresh_cache-7d1f7566-8e5e-476c-9d19-49ed7b16c308" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.988115] env[61273]: DEBUG oslo_concurrency.lockutils [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] Acquired lock "refresh_cache-7d1f7566-8e5e-476c-9d19-49ed7b16c308" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.988115] env[61273]: DEBUG nova.network.neutron [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Refreshing network info cache for port 8061659c-a94d-4f7d-a527-0f760ebc0807 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 594.988336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] Expecting reply to msg fd659c1f8e7b4af8809953155d68ecbd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 594.994185] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd659c1f8e7b4af8809953155d68ecbd [ 595.019151] env[61273]: ERROR nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8061659c-a94d-4f7d-a527-0f760ebc0807, please check neutron logs for more information. 
[ 595.019151] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 595.019151] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.019151] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 595.019151] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 595.019151] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 595.019151] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 595.019151] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 595.019151] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.019151] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 595.019151] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.019151] env[61273]: ERROR nova.compute.manager raise self.value [ 595.019151] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 595.019151] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 595.019151] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.019151] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 595.019658] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.019658] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 595.019658] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8061659c-a94d-4f7d-a527-0f760ebc0807, please check neutron logs for more information. 
[ 595.019658] env[61273]: ERROR nova.compute.manager [ 595.019658] env[61273]: Traceback (most recent call last): [ 595.019658] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 595.019658] env[61273]: listener.cb(fileno) [ 595.019658] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.019658] env[61273]: result = function(*args, **kwargs) [ 595.019658] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 595.019658] env[61273]: return func(*args, **kwargs) [ 595.019658] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 595.019658] env[61273]: raise e [ 595.019658] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.019658] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 595.019658] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 595.019658] env[61273]: created_port_ids = self._update_ports_for_instance( [ 595.019658] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 595.019658] env[61273]: with excutils.save_and_reraise_exception(): [ 595.019658] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.019658] env[61273]: self.force_reraise() [ 595.019658] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.019658] env[61273]: raise self.value [ 595.019658] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 595.019658] env[61273]: updated_port = self._update_port( [ 595.019658] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.019658] env[61273]: _ensure_no_port_binding_failure(port) [ 595.019658] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.019658] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 595.020760] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 8061659c-a94d-4f7d-a527-0f760ebc0807, please check neutron logs for more information. [ 595.020760] env[61273]: Removing descriptor: 15 [ 595.020760] env[61273]: ERROR nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8061659c-a94d-4f7d-a527-0f760ebc0807, please check neutron logs for more information. 
[ 595.020760] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Traceback (most recent call last): [ 595.020760] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 595.020760] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] yield resources [ 595.020760] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 595.020760] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self.driver.spawn(context, instance, image_meta, [ 595.020760] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 595.020760] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self._vmops.spawn(context, instance, image_meta, injected_files, [ 595.020760] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 595.020760] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] vm_ref = self.build_virtual_machine(instance, [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] vif_infos = vmwarevif.get_vif_info(self._session, [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] for vif in network_info: [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] return self._sync_wrapper(fn, *args, **kwargs) [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self.wait() [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self[:] = self._gt.wait() [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] return self._exit_event.wait() [ 595.021155] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 595.021512] env[61273]: ERROR 
nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] result = hub.switch() [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] return self.greenlet.switch() [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] result = function(*args, **kwargs) [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] return func(*args, **kwargs) [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] raise e [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] nwinfo = self.network_api.allocate_for_instance( [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 595.021512] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] created_port_ids = self._update_ports_for_instance( [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] with excutils.save_and_reraise_exception(): [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self.force_reraise() [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] raise self.value [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] updated_port = self._update_port( [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.021917] 
env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] _ensure_no_port_binding_failure(port) [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.021917] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] raise exception.PortBindingFailed(port_id=port['id']) [ 595.022216] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] nova.exception.PortBindingFailed: Binding failed for port 8061659c-a94d-4f7d-a527-0f760ebc0807, please check neutron logs for more information. [ 595.022216] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] [ 595.022216] env[61273]: INFO nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Terminating instance [ 595.023489] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Acquiring lock "refresh_cache-7d1f7566-8e5e-476c-9d19-49ed7b16c308" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.418863] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.093s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.419575] env[61273]: ERROR nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d803f7f9-4edd-478d-8efa-d1db78feae38, please check neutron logs for more information. 
[ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Traceback (most recent call last): [ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self.driver.spawn(context, instance, image_meta, [ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] vm_ref = self.build_virtual_machine(instance, [ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] vif_infos = vmwarevif.get_vif_info(self._session, [ 595.419575] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] for vif in network_info: [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] return self._sync_wrapper(fn, *args, **kwargs) [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self.wait() [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self[:] = self._gt.wait() [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] return self._exit_event.wait() [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] result = hub.switch() [ 595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
595.419889] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] return self.greenlet.switch() [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] result = function(*args, **kwargs) [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] return func(*args, **kwargs) [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] raise e [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] nwinfo = self.network_api.allocate_for_instance( [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] created_port_ids = self._update_ports_for_instance( [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] with excutils.save_and_reraise_exception(): [ 595.420287] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] self.force_reraise() [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] raise self.value [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] updated_port = self._update_port( [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] _ensure_no_port_binding_failure(port) [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] raise exception.PortBindingFailed(port_id=port['id']) [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] nova.exception.PortBindingFailed: Binding failed for port d803f7f9-4edd-478d-8efa-d1db78feae38, please check neutron logs for more information. [ 595.420654] env[61273]: ERROR nova.compute.manager [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] [ 595.420944] env[61273]: DEBUG nova.compute.utils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Binding failed for port d803f7f9-4edd-478d-8efa-d1db78feae38, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 595.421480] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.250s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.422224] env[61273]: DEBUG nova.objects.instance [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lazy-loading 'resources' on Instance uuid 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2 {{(pid=61273) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 595.422224] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 898154a1f96f434ebc4a159fc93794e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 595.426279] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Build of instance 1d64d913-45f0-4768-8375-7863d9ae43c3 was re-scheduled: Binding failed for port d803f7f9-4edd-478d-8efa-d1db78feae38, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 595.426342] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 595.430213] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Acquiring lock "refresh_cache-1d64d913-45f0-4768-8375-7863d9ae43c3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.430372] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Acquired lock "refresh_cache-1d64d913-45f0-4768-8375-7863d9ae43c3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.430570] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 595.430981] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 14e781e7837249b7a708c5721c9db719 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 595.432460] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898154a1f96f434ebc4a159fc93794e3 [ 595.436933] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14e781e7837249b7a708c5721c9db719 [ 595.508914] env[61273]: DEBUG nova.network.neutron [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.560607] env[61273]: DEBUG nova.network.neutron [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.561071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] Expecting reply to msg 0349cf9f49194c4bb3a33836c3edc3b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 595.569621] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0349cf9f49194c4bb3a33836c3edc3b2 [ 595.953594] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 596.045088] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.045088] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg dc31704cef7d482a9c7e7027e3bddeaa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 596.056386] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc31704cef7d482a9c7e7027e3bddeaa [ 596.065488] env[61273]: DEBUG oslo_concurrency.lockutils [req-3e3f2fc7-d014-4f77-921c-03887584db6e req-168ec16f-88c9-460d-933c-5674d9e42fe4 service nova] Releasing lock "refresh_cache-7d1f7566-8e5e-476c-9d19-49ed7b16c308" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.066302] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Acquired lock "refresh_cache-7d1f7566-8e5e-476c-9d19-49ed7b16c308" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.066482] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 596.066897] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 00092ac373ca4680a28b3f36607ed050 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 596.074311] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 00092ac373ca4680a28b3f36607ed050 [ 596.313600] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870470f0-8723-4a92-8468-126b4ed55c5e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.322056] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb817306-c693-4b64-bcbc-61193253fbef {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.352798] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69554808-c672-46d6-b3cf-47ad1f17b04f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.361507] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca479b62-9ac1-43fb-813e-a4eedb9e37fe {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.376247] env[61273]: DEBUG nova.compute.provider_tree [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.377004] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 42e5e88e5065443ebc53e19615f2df5d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 596.384404] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42e5e88e5065443ebc53e19615f2df5d [ 596.488826] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Acquiring lock "3635532a-2af3-4ef5-a922-37fc763c9708" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.489073] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Lock "3635532a-2af3-4ef5-a922-37fc763c9708" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.547245] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Releasing lock "refresh_cache-1d64d913-45f0-4768-8375-7863d9ae43c3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.547636] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Virt driver does not provide unplug_vifs method, so it is not 
possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 596.547969] env[61273]: DEBUG nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 596.548281] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 596.564089] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 596.564642] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 1e1bab7dd29b4cb69e7701f2ba52a663 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 596.572983] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e1bab7dd29b4cb69e7701f2ba52a663 [ 596.581232] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 596.626875] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.627387] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 98043078f55a469b99da066ae0ee3d04 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 596.636327] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98043078f55a469b99da066ae0ee3d04 [ 596.880204] env[61273]: DEBUG nova.scheduler.client.report [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 596.882742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 2c1f322db19a42a48ea3739f2bdcfb13 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 596.894238] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c1f322db19a42a48ea3739f2bdcfb13 [ 597.035219] env[61273]: DEBUG nova.compute.manager [req-87a635d8-579c-4bee-9446-679b154c3ad2 req-72686f0d-4f2e-4fab-88e8-57884d4c8190 service nova] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Received event network-vif-deleted-8061659c-a94d-4f7d-a527-0f760ebc0807 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 597.068298] env[61273]: DEBUG nova.network.neutron [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.068825] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 684aa7c31ac1465f850b5806c582f8e8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 597.080212] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 684aa7c31ac1465f850b5806c582f8e8 [ 597.129796] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] 
Releasing lock "refresh_cache-7d1f7566-8e5e-476c-9d19-49ed7b16c308" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.130221] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 597.130415] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 597.130706] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33aa98bc-22f4-4e00-9b1a-c57890ca3456 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.141307] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562cfa26-b1cf-4088-8c5f-66a470099a2b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.170120] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7d1f7566-8e5e-476c-9d19-49ed7b16c308 could not be found. [ 597.170375] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 597.170556] env[61273]: INFO nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Took 0.04 seconds to destroy the instance on the hypervisor. [ 597.170805] env[61273]: DEBUG oslo.service.loopingcall [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 597.171012] env[61273]: DEBUG nova.compute.manager [-] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 597.171107] env[61273]: DEBUG nova.network.neutron [-] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 597.186724] env[61273]: DEBUG nova.network.neutron [-] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.187296] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3ed3ea68051a4fe9af64a4b576c38af0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 597.194848] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ed3ea68051a4fe9af64a4b576c38af0 [ 597.385193] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.964s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.387692] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.791s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.389259] env[61273]: INFO nova.compute.claims [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 597.390889] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 8543a28f98054f2e983eaec9e29b7222 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 597.410831] env[61273]: INFO nova.scheduler.client.report [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Deleted allocations for instance 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2 [ 597.414370] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 30cc90e466744e819da959352244dcfc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 597.435187] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8543a28f98054f2e983eaec9e29b7222 [ 597.462504] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30cc90e466744e819da959352244dcfc [ 597.570637] env[61273]: INFO nova.compute.manager [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] [instance: 1d64d913-45f0-4768-8375-7863d9ae43c3] Took 1.02 seconds to deallocate network for instance. 
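The PortBindingFailed traceback above (port d803f7f9-4edd-478d-8efa-d1db78feae38), like the identical failures later in this log, aborts at the _ensure_no_port_binding_failure() check in nova/network/neutron.py. A minimal sketch of what that check does, assuming, as the traceback suggests, that Neutron reports a failed binding through the port's binding:vif_type field; this is an illustration, not the Nova source:

    # Illustrative sketch only, not the Nova implementation.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron leaves binding:vif_type at 'binding_failed' when no
        # mechanism driver could bind the port to the requested host.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

Once the exception propagates out of _update_ports_for_instance(), the build is re-scheduled, VIF unplugging is attempted (the vmwareapi driver does not provide it), and the network allocation is torn down, which is the sequence logged above for instance 1d64d913-45f0-4768-8375-7863d9ae43c3.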
[ 597.572529] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 84f07ecd8b8143ee83338290cf7a2b96 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 597.604906] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84f07ecd8b8143ee83338290cf7a2b96 [ 597.690884] env[61273]: DEBUG nova.network.neutron [-] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.691388] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 829fc10dfa404a74a2d00dfefe53c827 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 597.700221] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 829fc10dfa404a74a2d00dfefe53c827 [ 597.896066] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 7ea3e27d72864d01a0016397a23edd2d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 597.905460] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ea3e27d72864d01a0016397a23edd2d [ 597.920064] env[61273]: DEBUG oslo_concurrency.lockutils [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Lock "4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.594s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.920411] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-759d0da7-f412-4fdf-a466-01ca69876e62 tempest-ServersAdmin275Test-2057105880 tempest-ServersAdmin275Test-2057105880-project-member] Expecting reply to msg 2c24b63d202c4c9ca470e34287d8b723 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 597.934554] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c24b63d202c4c9ca470e34287d8b723 [ 598.076809] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 5f53c2ed471f4b86ad179844166c9068 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 598.110765] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f53c2ed471f4b86ad179844166c9068 [ 598.194108] env[61273]: INFO nova.compute.manager [-] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Took 1.02 seconds to deallocate network for instance. 
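The recurring "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" lines around the claims come from oslo.concurrency's lockutils wrapper: instance_claim, abort_instance_claim and update_usage all take the same in-process "compute_resources" semaphore, so a caller that arrives while another holds it simply waits (24.250s and 20.791s in the entries above). A minimal sketch of that pattern; the function name and body are placeholders, only the locking calls are real oslo.concurrency API:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(resources):
        # Runs with the in-process 'compute_resources' semaphore held, so
        # claims, aborts and usage updates never interleave. lockutils
        # emits the acquired/waited/released/held debug lines seen above.
        resources['in_use'] += 1

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # critical section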
[ 598.196689] env[61273]: DEBUG nova.compute.claims [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 598.196894] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.596355] env[61273]: INFO nova.scheduler.client.report [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Deleted allocations for instance 1d64d913-45f0-4768-8375-7863d9ae43c3 [ 598.603011] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Expecting reply to msg 4e5181306dfe4f928d04ec3144d4cce8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 598.619408] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e5181306dfe4f928d04ec3144d4cce8 [ 598.745220] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f723f20-d853-46d0-8235-72b4e210e693 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.755416] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d4342c-051f-4568-a308-d3df8238c880 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.785237] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb3d08d-12f1-4385-89c5-c00418da5eee {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.793931] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e30d0b-1eac-41c2-a253-8303e26675dd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.807673] env[61273]: DEBUG nova.compute.provider_tree [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.808229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 852660d039044065b81da624ca7e19d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 598.815449] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 852660d039044065b81da624ca7e19d1 [ 599.111746] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ad704dec-d447-49f2-84f8-39ab723d8651 tempest-VolumesAssistedSnapshotsTest-325362177 
tempest-VolumesAssistedSnapshotsTest-325362177-project-member] Lock "1d64d913-45f0-4768-8375-7863d9ae43c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.143s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.112381] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 415adb4a30234f2c982445f2e2058840 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 599.129539] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 415adb4a30234f2c982445f2e2058840 [ 599.311638] env[61273]: DEBUG nova.scheduler.client.report [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 599.314625] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 4c1b2aa0df1c44b882f701b30adabdc3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 599.325229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c1b2aa0df1c44b882f701b30adabdc3 [ 599.615078] env[61273]: DEBUG nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 599.616842] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg cfff158d2a3f46079ed0304ff6fdbedd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 599.656745] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfff158d2a3f46079ed0304ff6fdbedd [ 599.816386] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.816948] env[61273]: DEBUG nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 599.818616] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg da3544d2d67947f094b427cf59301974 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 599.819641] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.266s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.821302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg f6aa0476b10d42e8b987e62c8abe5991 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 599.857223] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da3544d2d67947f094b427cf59301974 [ 599.865511] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6aa0476b10d42e8b987e62c8abe5991 [ 600.141209] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.324489] env[61273]: DEBUG nova.compute.utils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 600.325173] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 3979ce35280a465b9682e5394e7f9e88 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 600.330838] env[61273]: DEBUG nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 600.330838] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 600.345100] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3979ce35280a465b9682e5394e7f9e88 [ 600.392251] env[61273]: DEBUG nova.policy [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c471664da5894985bf7478057ea19b73', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3de421e0f994df8b809ce0096753f23', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 600.521947] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 13f3d82de4b2427f824984f310f8b98a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 600.532028] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13f3d82de4b2427f824984f310f8b98a [ 600.724878] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0778a223-d00f-4795-91f8-749abf1410bd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.733886] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbf5540-c099-481e-9466-3ef3449d2910 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.764097] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f58e08c-9f33-4095-a850-093189e7bef9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.776246] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1571cf35-c6e6-4b01-a066-64ca972daa9a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.793558] env[61273]: DEBUG nova.compute.provider_tree [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.794184] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg cfa15b1f46904e0fa4f19e919c442c66 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 600.802325] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfa15b1f46904e0fa4f19e919c442c66 [ 600.829548] env[61273]: DEBUG 
nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 600.831481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 0fdcc6a6286a490c97892fb471cce698 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 600.866574] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fdcc6a6286a490c97892fb471cce698 [ 601.248367] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Successfully created port: eca6b818-5b64-4a66-a817-2488dc457863 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 601.297065] env[61273]: DEBUG nova.scheduler.client.report [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 601.299528] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg ceb56e6f85b44424a6ae2e878df5257d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 601.311833] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ceb56e6f85b44424a6ae2e878df5257d [ 601.336377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 80081a41a1bf4847b41659e74c08dced in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 601.389117] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80081a41a1bf4847b41659e74c08dced [ 601.809672] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.990s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.810402] env[61273]: ERROR nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Failed to build and run instance: nova.exception.PortBindingFailed: 
Binding failed for port bc737d73-0b13-4405-8b56-e8520c00a00f, please check neutron logs for more information. [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Traceback (most recent call last): [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self.driver.spawn(context, instance, image_meta, [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self._vmops.spawn(context, instance, image_meta, injected_files, [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] vm_ref = self.build_virtual_machine(instance, [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] vif_infos = vmwarevif.get_vif_info(self._session, [ 601.810402] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] for vif in network_info: [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] return self._sync_wrapper(fn, *args, **kwargs) [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self.wait() [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self[:] = self._gt.wait() [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] return self._exit_event.wait() [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] result = hub.switch() [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: 
f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 601.810670] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] return self.greenlet.switch() [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] result = function(*args, **kwargs) [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] return func(*args, **kwargs) [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] raise e [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] nwinfo = self.network_api.allocate_for_instance( [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] created_port_ids = self._update_ports_for_instance( [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] with excutils.save_and_reraise_exception(): [ 601.811011] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] self.force_reraise() [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] raise self.value [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] updated_port = self._update_port( [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] _ensure_no_port_binding_failure(port) [ 601.811294] env[61273]: ERROR 
nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] raise exception.PortBindingFailed(port_id=port['id']) [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] nova.exception.PortBindingFailed: Binding failed for port bc737d73-0b13-4405-8b56-e8520c00a00f, please check neutron logs for more information. [ 601.811294] env[61273]: ERROR nova.compute.manager [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] [ 601.811535] env[61273]: DEBUG nova.compute.utils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Binding failed for port bc737d73-0b13-4405-8b56-e8520c00a00f, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 601.812614] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.754s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.814145] env[61273]: INFO nova.compute.claims [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.815741] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg a202bf1c962242a19abf3da2212fec71 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 601.820305] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Build of instance f0c26eb3-e6d6-4d9f-9f07-5add9de6d126 was re-scheduled: Binding failed for port bc737d73-0b13-4405-8b56-e8520c00a00f, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 601.820837] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 601.821071] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Acquiring lock "refresh_cache-f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.821216] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Acquired lock "refresh_cache-f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.821383] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 601.821875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg bf34a392a633415984165c9f445d85b7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 601.833701] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf34a392a633415984165c9f445d85b7 [ 601.839775] env[61273]: DEBUG nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 601.848747] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Acquiring lock "144c3c21-b18e-4997-a241-8ff21a3b4835" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.848747] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Lock "144c3c21-b18e-4997-a241-8ff21a3b4835" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.867844] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 601.868079] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 601.868233] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.868408] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 601.868557] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 601.868700] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 601.868902] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 601.869107] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 601.869310] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 601.869494] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 601.869755] env[61273]: DEBUG nova.virt.hardware [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 601.871264] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d49d57-1f02-4a6f-bf91-5cfe52391d74 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.881196] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfae05d-b69c-40ce-bf1f-c43bfeeec814 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.886415] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a202bf1c962242a19abf3da2212fec71 [ 602.325416] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 97d99d495f5949129f424d3320ac4d89 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 602.338968] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97d99d495f5949129f424d3320ac4d89 [ 602.353176] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.497365] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.497906] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 69d3e4ecf1e14567a5dd44b894e11832 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 602.508172] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69d3e4ecf1e14567a5dd44b894e11832 [ 602.768478] env[61273]: DEBUG nova.compute.manager [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Received event network-changed-eca6b818-5b64-4a66-a817-2488dc457863 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 602.768668] env[61273]: DEBUG nova.compute.manager [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Refreshing instance network info cache due to event network-changed-eca6b818-5b64-4a66-a817-2488dc457863. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 602.768884] env[61273]: DEBUG oslo_concurrency.lockutils [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] Acquiring lock "refresh_cache-1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.769020] env[61273]: DEBUG oslo_concurrency.lockutils [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] Acquired lock "refresh_cache-1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.769169] env[61273]: DEBUG nova.network.neutron [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Refreshing network info cache for port eca6b818-5b64-4a66-a817-2488dc457863 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 602.769710] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] Expecting reply to msg d38a97ebd0c64323848a9c4875bac432 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 602.777998] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d38a97ebd0c64323848a9c4875bac432 [ 602.857159] env[61273]: ERROR nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port eca6b818-5b64-4a66-a817-2488dc457863, please check neutron logs for more information. 
[ 602.857159] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 602.857159] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.857159] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 602.857159] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.857159] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 602.857159] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.857159] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 602.857159] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.857159] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 602.857159] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.857159] env[61273]: ERROR nova.compute.manager raise self.value [ 602.857159] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.857159] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 602.857159] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.857159] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 602.857736] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.857736] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 602.857736] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port eca6b818-5b64-4a66-a817-2488dc457863, please check neutron logs for more information. 
[ 602.857736] env[61273]: ERROR nova.compute.manager [ 602.857736] env[61273]: Traceback (most recent call last): [ 602.857736] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 602.857736] env[61273]: listener.cb(fileno) [ 602.857736] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.857736] env[61273]: result = function(*args, **kwargs) [ 602.857736] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 602.857736] env[61273]: return func(*args, **kwargs) [ 602.857736] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.857736] env[61273]: raise e [ 602.857736] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.857736] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 602.857736] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.857736] env[61273]: created_port_ids = self._update_ports_for_instance( [ 602.857736] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.857736] env[61273]: with excutils.save_and_reraise_exception(): [ 602.857736] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.857736] env[61273]: self.force_reraise() [ 602.857736] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.857736] env[61273]: raise self.value [ 602.857736] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.857736] env[61273]: updated_port = self._update_port( [ 602.857736] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.857736] env[61273]: _ensure_no_port_binding_failure(port) [ 602.857736] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.857736] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 602.858411] env[61273]: nova.exception.PortBindingFailed: Binding failed for port eca6b818-5b64-4a66-a817-2488dc457863, please check neutron logs for more information. [ 602.858411] env[61273]: Removing descriptor: 15 [ 602.858411] env[61273]: ERROR nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port eca6b818-5b64-4a66-a817-2488dc457863, please check neutron logs for more information. 
[ 602.858411] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Traceback (most recent call last): [ 602.858411] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 602.858411] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] yield resources [ 602.858411] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 602.858411] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self.driver.spawn(context, instance, image_meta, [ 602.858411] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 602.858411] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.858411] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 602.858411] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] vm_ref = self.build_virtual_machine(instance, [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] for vif in network_info: [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] return self._sync_wrapper(fn, *args, **kwargs) [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self.wait() [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self[:] = self._gt.wait() [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] return self._exit_event.wait() [ 602.858705] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.859023] env[61273]: ERROR 
nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] result = hub.switch() [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] return self.greenlet.switch() [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] result = function(*args, **kwargs) [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] return func(*args, **kwargs) [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] raise e [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] nwinfo = self.network_api.allocate_for_instance( [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.859023] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] created_port_ids = self._update_ports_for_instance( [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] with excutils.save_and_reraise_exception(): [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self.force_reraise() [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] raise self.value [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] updated_port = self._update_port( [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.859364] 
env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] _ensure_no_port_binding_failure(port) [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.859364] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] raise exception.PortBindingFailed(port_id=port['id']) [ 602.859677] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] nova.exception.PortBindingFailed: Binding failed for port eca6b818-5b64-4a66-a817-2488dc457863, please check neutron logs for more information. [ 602.859677] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] [ 602.859677] env[61273]: INFO nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Terminating instance [ 602.860335] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "refresh_cache-1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.000945] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Releasing lock "refresh_cache-f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.001127] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 603.001303] env[61273]: DEBUG nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 603.001463] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 603.029817] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.030453] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 5b1144dc6ebe4a8c8a0254804994b6ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 603.386543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b1144dc6ebe4a8c8a0254804994b6ce [ 603.411623] env[61273]: DEBUG nova.network.neutron [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.532739] env[61273]: DEBUG nova.network.neutron [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.533913] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 3312fc6b72694b4dba42d9e0eb729540 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 603.537064] env[61273]: DEBUG nova.network.neutron [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.537064] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] Expecting reply to msg 2881c6a4ca6949d2aa3f119f69f07c32 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 603.543022] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3312fc6b72694b4dba42d9e0eb729540 [ 603.545467] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2881c6a4ca6949d2aa3f119f69f07c32 [ 603.622115] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc1800b-e379-418f-aa05-8ac2fbccec2b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.633929] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efbe47cf-ded1-4e5e-baa8-64063e08bba4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.687703] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d95657-57f6-41c6-b786-42aa1e511cc9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.700238] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3c69fc-7dee-42b0-8edf-ba55e9a07154 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
603.723495] env[61273]: DEBUG nova.compute.provider_tree [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.724303] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 6659bec5ba634344baa5d05ac6728ac9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 603.733249] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6659bec5ba634344baa5d05ac6728ac9 [ 604.038749] env[61273]: INFO nova.compute.manager [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] [instance: f0c26eb3-e6d6-4d9f-9f07-5add9de6d126] Took 1.04 seconds to deallocate network for instance. [ 604.041032] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 429debbc8ac044929e9a9a0cb95be9da in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 604.047022] env[61273]: DEBUG oslo_concurrency.lockutils [req-13dca20b-6010-4518-8063-301c59d82790 req-d88ef63c-1c14-4f17-aa8f-49237e18ffbc service nova] Releasing lock "refresh_cache-1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.047022] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquired lock "refresh_cache-1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.047022] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 604.047022] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 1ef80322e7d4493ebeb498cfe27626da in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 604.057055] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ef80322e7d4493ebeb498cfe27626da [ 604.087084] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 429debbc8ac044929e9a9a0cb95be9da [ 604.230687] env[61273]: DEBUG nova.scheduler.client.report [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 604.230687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 8c757f35e8874056a38d506611b1b5e5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 604.244911] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c757f35e8874056a38d506611b1b5e5 [ 604.554150] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg e559e85fe57b4cb5bd49ae61aadb883b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 604.576272] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.587912] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e559e85fe57b4cb5bd49ae61aadb883b [ 604.694571] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.694571] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 9336f5f492104373a8d7bfbc5fe0af4d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 604.707583] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9336f5f492104373a8d7bfbc5fe0af4d [ 604.736079] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.920s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.736079] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 604.736079] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg b32ea74d18364c1cbcd65b52cf5b7cc8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 604.736491] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.540s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.736752] env[61273]: DEBUG nova.objects.instance [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lazy-loading 'resources' on Instance uuid 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca {{(pid=61273) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 604.737128] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 4f1bbefd221d48f591697c3a9b39b060 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 604.746455] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f1bbefd221d48f591697c3a9b39b060 [ 604.788831] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b32ea74d18364c1cbcd65b52cf5b7cc8 [ 604.951401] env[61273]: DEBUG nova.compute.manager [req-fe4f2321-705c-4247-b8c0-591e57ffa44d req-b8f46778-8466-47a1-a70b-41469d142132 service nova] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Received event network-vif-deleted-eca6b818-5b64-4a66-a817-2488dc457863 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 605.068181] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Acquiring lock "8d63e0a8-85a1-400b-a6f0-8e87c7945655" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.068441] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Lock "8d63e0a8-85a1-400b-a6f0-8e87c7945655" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.087287] env[61273]: INFO nova.scheduler.client.report [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Deleted allocations for instance f0c26eb3-e6d6-4d9f-9f07-5add9de6d126 [ 605.093304] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 
tempest-AttachInterfacesV270Test-2098825571-project-member] Expecting reply to msg 6d65060465294031b37a6fdd332ea0d4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 605.124332] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d65060465294031b37a6fdd332ea0d4 [ 605.200301] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Releasing lock "refresh_cache-1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.200301] env[61273]: DEBUG nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 605.200301] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 605.200301] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5284765e-cf35-4e9e-9066-02a1ee103a86 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.210372] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e709fe4-1774-4bdc-87a6-7d950e171bdc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.240052] env[61273]: DEBUG nova.compute.utils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 605.240695] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 8b82178f336a4adfb828b77290d9bafa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 605.244266] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7 could not be found. [ 605.244484] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 605.244661] env[61273]: INFO nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 605.244904] env[61273]: DEBUG oslo.service.loopingcall [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.246090] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 605.246090] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 605.247574] env[61273]: DEBUG nova.compute.manager [-] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 605.247673] env[61273]: DEBUG nova.network.neutron [-] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 605.253515] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b82178f336a4adfb828b77290d9bafa [ 605.490536] env[61273]: DEBUG nova.network.neutron [-] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.491309] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 403ef2b6c26e4c25b21ab75ea6f00e78 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 605.502736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 403ef2b6c26e4c25b21ab75ea6f00e78 [ 605.543623] env[61273]: DEBUG nova.policy [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e40bbb7692934bf6b18281dd86f8f130', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0aadc74e25e84da28c962ea370b4abb7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 605.595232] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6ab6dead-0874-4403-9491-2b16e11b1675 tempest-AttachInterfacesV270Test-2098825571 tempest-AttachInterfacesV270Test-2098825571-project-member] Lock "f0c26eb3-e6d6-4d9f-9f07-5add9de6d126" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.327s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.596110] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 41924e2a16c84ea9997039fa69948498 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 605.606130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41924e2a16c84ea9997039fa69948498 [ 605.689065] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48cd209-8e68-4191-9a3b-76f673b13da2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.700279] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2c6334-45eb-4f79-bff8-7d109b58ca02 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.735422] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ff4a6f-be85-42c8-bc84-c39947aae3ad {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.744121] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569c9619-9a7d-422e-ba44-e94ccab46e3f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.748321] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 605.751477] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 962d8ec740c2431bb79af1af3033fea8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 605.764722] env[61273]: DEBUG nova.compute.provider_tree [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.765246] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 4bfc1a26e8a64361996e7ce6b2e4ecd0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 605.786737] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bfc1a26e8a64361996e7ce6b2e4ecd0 [ 605.808577] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 962d8ec740c2431bb79af1af3033fea8 [ 605.996894] env[61273]: DEBUG nova.network.neutron [-] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.997404] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 60ca2b3118bd4f42965e593a42265271 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 606.012780] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60ca2b3118bd4f42965e593a42265271 [ 606.100512] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 606.102239] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 1db3c86280074bdb99a4a0270f6d194f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 606.160671] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1db3c86280074bdb99a4a0270f6d194f [ 606.268555] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg e332a29e8a604ad5af3c5f8a47479ff7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 606.273754] env[61273]: DEBUG nova.scheduler.client.report [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 606.276378] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 05f14e45340842358376c2f96fdc54d6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 606.295407] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05f14e45340842358376c2f96fdc54d6 [ 606.305476] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e332a29e8a604ad5af3c5f8a47479ff7 [ 606.368651] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Successfully created port: 2a417537-62ce-4faf-b880-d5f49a9deb7f {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.499392] env[61273]: INFO nova.compute.manager [-] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Took 1.25 seconds to deallocate network for instance. 
[ 606.501853] env[61273]: DEBUG nova.compute.claims [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 606.502038] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.624859] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.769222] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 606.780176] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.044s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.789884] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.153s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.789884] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 370f726f6bb14480824a549489ae2b49 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 606.811198] env[61273]: INFO nova.scheduler.client.report [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Deleted allocations for instance 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca [ 606.815977] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg d4fb9071b1fa4750a7af90b174c2c4c6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 606.818792] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 606.819019] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 606.819189] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 606.819361] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 606.819505] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 606.819945] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 606.823909] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 606.823909] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 606.823909] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 606.823909] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 606.823909] env[61273]: DEBUG nova.virt.hardware [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 606.824303] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabc9ce8-395c-47ed-8a7b-2a0c76a7c0eb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.834576] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7856673f-52bc-432d-a7f5-97ab48ab76e4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.859122] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 370f726f6bb14480824a549489ae2b49 [ 606.886583] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4fb9071b1fa4750a7af90b174c2c4c6 [ 607.328903] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Lock "62c3b24d-bee7-4dd2-a6c7-9303c7c28cca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.039s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.329248] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c8f1f2f5-5a51-4e1d-adb8-274558f0667c tempest-ServerDiagnosticsV248Test-943853116 tempest-ServerDiagnosticsV248Test-943853116-project-member] Expecting reply to msg 9f45719579a745c1adf9072f8a28624c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 607.345611] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f45719579a745c1adf9072f8a28624c [ 607.471472] env[61273]: ERROR nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2a417537-62ce-4faf-b880-d5f49a9deb7f, please check neutron logs for more information. 
[ 607.471472] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 607.471472] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.471472] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 607.471472] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 607.471472] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 607.471472] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 607.471472] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 607.471472] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.471472] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 607.471472] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.471472] env[61273]: ERROR nova.compute.manager raise self.value [ 607.471472] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 607.471472] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 607.471472] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.471472] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 607.471973] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.471973] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 607.471973] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2a417537-62ce-4faf-b880-d5f49a9deb7f, please check neutron logs for more information. 
[ 607.471973] env[61273]: ERROR nova.compute.manager [ 607.471973] env[61273]: Traceback (most recent call last): [ 607.471973] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 607.471973] env[61273]: listener.cb(fileno) [ 607.471973] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.471973] env[61273]: result = function(*args, **kwargs) [ 607.471973] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 607.471973] env[61273]: return func(*args, **kwargs) [ 607.471973] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.471973] env[61273]: raise e [ 607.471973] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.471973] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 607.471973] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 607.471973] env[61273]: created_port_ids = self._update_ports_for_instance( [ 607.471973] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 607.471973] env[61273]: with excutils.save_and_reraise_exception(): [ 607.471973] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.471973] env[61273]: self.force_reraise() [ 607.471973] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.471973] env[61273]: raise self.value [ 607.471973] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 607.471973] env[61273]: updated_port = self._update_port( [ 607.471973] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.471973] env[61273]: _ensure_no_port_binding_failure(port) [ 607.471973] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.471973] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 607.472621] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 2a417537-62ce-4faf-b880-d5f49a9deb7f, please check neutron logs for more information. [ 607.472621] env[61273]: Removing descriptor: 15 [ 607.472671] env[61273]: ERROR nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2a417537-62ce-4faf-b880-d5f49a9deb7f, please check neutron logs for more information. 
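Editor's note: the traceback above bottoms out in _ensure_no_port_binding_failure() at nova/network/neutron.py:294, the point where Nova turns a port whose binding failed on the Neutron side into PortBindingFailed. The snippet below is a simplified, self-contained stand-in for that check, not Nova's exact code; the dict-shaped port and the 'binding_failed' vif_type sentinel are assumptions for illustration. It reproduces the error message that recurs throughout this log.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding via the port's binding:vif_type field.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure({'id': '2a417537-62ce-4faf-b880-d5f49a9deb7f',
                                        'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same text as the ERROR lines above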
[ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Traceback (most recent call last): [ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] yield resources [ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self.driver.spawn(context, instance, image_meta, [ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] vm_ref = self.build_virtual_machine(instance, [ 607.472671] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] for vif in network_info: [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] return self._sync_wrapper(fn, *args, **kwargs) [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self.wait() [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self[:] = self._gt.wait() [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] return self._exit_event.wait() [ 607.472928] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 607.472928] env[61273]: ERROR 
nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] result = hub.switch() [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] return self.greenlet.switch() [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] result = function(*args, **kwargs) [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] return func(*args, **kwargs) [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] raise e [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] nwinfo = self.network_api.allocate_for_instance( [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] created_port_ids = self._update_ports_for_instance( [ 607.473222] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] with excutils.save_and_reraise_exception(): [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self.force_reraise() [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] raise self.value [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] updated_port = self._update_port( [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.473560] 
env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] _ensure_no_port_binding_failure(port) [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] raise exception.PortBindingFailed(port_id=port['id']) [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] nova.exception.PortBindingFailed: Binding failed for port 2a417537-62ce-4faf-b880-d5f49a9deb7f, please check neutron logs for more information. [ 607.473560] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] [ 607.473961] env[61273]: INFO nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Terminating instance [ 607.475924] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Acquiring lock "refresh_cache-1804f229-97b9-4ee3-933d-715431a900f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.476205] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Acquired lock "refresh_cache-1804f229-97b9-4ee3-933d-715431a900f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.476463] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 607.476964] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg ab79b43f5b1445119c710e2aacf631a7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 607.484885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab79b43f5b1445119c710e2aacf631a7 [ 607.740848] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32729e3-4a91-4f60-aadf-872fe651ef7e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.749597] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af6ebab-b565-4439-b067-4616d01e68fc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.782984] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ff3bd2-4a3d-4915-b859-35e8563e4b65 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.792828] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb310372-6e07-4bff-a268-f612eda767b4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.808230] env[61273]: DEBUG nova.compute.provider_tree [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.808859] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg a6e0fa9a78244c1485580475c1609e09 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 607.816085] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6e0fa9a78244c1485580475c1609e09 [ 607.900673] env[61273]: DEBUG nova.compute.manager [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Received event network-changed-2a417537-62ce-4faf-b880-d5f49a9deb7f {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 607.901103] env[61273]: DEBUG nova.compute.manager [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Refreshing instance network info cache due to event network-changed-2a417537-62ce-4faf-b880-d5f49a9deb7f. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 607.901358] env[61273]: DEBUG oslo_concurrency.lockutils [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] Acquiring lock "refresh_cache-1804f229-97b9-4ee3-933d-715431a900f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.999586] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.071950] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.072511] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 23a2dbd2d5e74dbca9310560b57a6269 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 608.081923] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23a2dbd2d5e74dbca9310560b57a6269 [ 608.311887] env[61273]: DEBUG nova.scheduler.client.report [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 608.314075] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg eb598392bf694030a0e10cbf24b9500b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 608.325725] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb598392bf694030a0e10cbf24b9500b [ 608.575261] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Releasing lock "refresh_cache-1804f229-97b9-4ee3-933d-715431a900f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.575721] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 608.575913] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 608.576252] env[61273]: DEBUG oslo_concurrency.lockutils [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] Acquired lock "refresh_cache-1804f229-97b9-4ee3-933d-715431a900f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.576434] env[61273]: DEBUG nova.network.neutron [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Refreshing network info cache for port 2a417537-62ce-4faf-b880-d5f49a9deb7f {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 608.577277] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] Expecting reply to msg 19d1a0978f85462bb06ec071eb6f5ec2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 608.578085] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8117bcdb-d180-40f7-bff9-dc1ea3e34832 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.585764] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19d1a0978f85462bb06ec071eb6f5ec2 [ 608.588735] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcc00c4-45b1-43eb-b5d4-d31a41055441 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.614517] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1804f229-97b9-4ee3-933d-715431a900f8 could not be found. [ 608.614755] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 608.614929] env[61273]: INFO nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 608.616336] env[61273]: DEBUG oslo.service.loopingcall [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.616925] env[61273]: DEBUG nova.compute.manager [-] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 608.617041] env[61273]: DEBUG nova.network.neutron [-] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 608.638628] env[61273]: DEBUG nova.network.neutron [-] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.639167] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e1c7edaa0c15476c94791d8293d003d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 608.646559] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1c7edaa0c15476c94791d8293d003d2 [ 608.816494] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.031s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.817168] env[61273]: ERROR nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e, please check neutron logs for more information. 
[ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Traceback (most recent call last): [ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self.driver.spawn(context, instance, image_meta, [ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self._vmops.spawn(context, instance, image_meta, injected_files, [ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] vm_ref = self.build_virtual_machine(instance, [ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] vif_infos = vmwarevif.get_vif_info(self._session, [ 608.817168] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] for vif in network_info: [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] return self._sync_wrapper(fn, *args, **kwargs) [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self.wait() [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self[:] = self._gt.wait() [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] return self._exit_event.wait() [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] result = hub.switch() [ 608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
608.817459] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] return self.greenlet.switch() [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] result = function(*args, **kwargs) [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] return func(*args, **kwargs) [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] raise e [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] nwinfo = self.network_api.allocate_for_instance( [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] created_port_ids = self._update_ports_for_instance( [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] with excutils.save_and_reraise_exception(): [ 608.817753] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] self.force_reraise() [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] raise self.value [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] updated_port = self._update_port( [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] _ensure_no_port_binding_failure(port) [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] raise exception.PortBindingFailed(port_id=port['id']) [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] nova.exception.PortBindingFailed: Binding failed for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e, please check neutron logs for more information. [ 608.818040] env[61273]: ERROR nova.compute.manager [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] [ 608.818303] env[61273]: DEBUG nova.compute.utils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Binding failed for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 608.819136] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.349s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.821212] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 60b107dd763e4d9fad2947c5e6c0a9fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 608.827760] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Build of instance 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050 was re-scheduled: Binding failed for port a5e6f22e-f786-4eb3-9df7-8e5f76d3b84e, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 608.827760] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 608.827760] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "refresh_cache-8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.827760] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquired lock "refresh_cache-8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.828061] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 608.828061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg dad6deb247c544eebbb4f5ee1d741864 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 608.830833] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dad6deb247c544eebbb4f5ee1d741864 [ 608.883155] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60b107dd763e4d9fad2947c5e6c0a9fb [ 609.110980] env[61273]: DEBUG nova.network.neutron [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.142375] env[61273]: DEBUG nova.network.neutron [-] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.142375] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6f18309bb1e84d0989c8f164a8b48f82 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 609.150513] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f18309bb1e84d0989c8f164a8b48f82 [ 609.240399] env[61273]: DEBUG nova.network.neutron [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.240399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] Expecting reply to msg af6393d9f3904b4c855a99493f092d7b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 609.247870] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af6393d9f3904b4c855a99493f092d7b [ 609.346908] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.425104] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.425625] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 84643fe8f0f74f99ac64dd7e87db3a12 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 609.433683] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84643fe8f0f74f99ac64dd7e87db3a12 [ 609.644260] env[61273]: INFO nova.compute.manager [-] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Took 1.03 seconds to deallocate network for instance. 
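Editor's note: the teardown logged above for instance 1804f229-97b9-4ee3-933d-715431a900f8 shows the tolerant destroy path: the vmwareapi driver cannot find the VM on the backend, warns "Instance does not exist on backend", treats the instance as already destroyed, and still deallocates the network. A rough sketch of that control flow, with hypothetical helper names (find_vm_ref, deallocate_network) standing in for the real driver calls:

    class InstanceNotFound(Exception):
        pass

    def terminate(find_vm_ref, deallocate_network, instance_uuid):
        try:
            vm_ref = find_vm_ref(instance_uuid)   # e.g. the SearchIndex.FindAllByUuid call above
            # ... power off / unregister vm_ref here ...
        except InstanceNotFound:
            # Backend VM is already gone: warn and fall through, as in the WARNING above.
            print("Instance does not exist on backend: %s" % instance_uuid)
        deallocate_network(instance_uuid)         # ports are released either way

    def _missing(uuid):
        raise InstanceNotFound(uuid)              # simulate a VM that is not on the backend

    terminate(_missing,
              lambda uuid: print("deallocated network for %s" % uuid),
              "1804f229-97b9-4ee3-933d-715431a900f8")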
[ 609.646607] env[61273]: DEBUG nova.compute.claims [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 609.646781] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.744064] env[61273]: DEBUG oslo_concurrency.lockutils [req-0067d397-b3de-4bcc-91e3-680c92b35388 req-c3c4ffe1-43a6-4179-bc69-f4c820f1c11a service nova] Releasing lock "refresh_cache-1804f229-97b9-4ee3-933d-715431a900f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.744064] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fbb46b-fd20-437e-8871-cef5a5fd8e58 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.752564] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd48b9e-6fda-4f6f-bb4c-32060a94dbea {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.786716] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370a1cdf-902a-48a6-9dd0-27950570630e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.796574] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86353b3-bb01-489f-8132-f33025784154 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.811334] env[61273]: DEBUG nova.compute.provider_tree [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.812266] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg d4042efbff794634bc6a42aebf7381e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 609.820543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4042efbff794634bc6a42aebf7381e6 [ 609.930812] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Releasing lock "refresh_cache-8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.930812] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 
tempest-SecurityGroupsTestJSON-905376977-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 609.930812] env[61273]: DEBUG nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 609.930812] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 609.950832] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.950832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg e60fff38bbe24cab84e68d84bd29a727 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 609.957919] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e60fff38bbe24cab84e68d84bd29a727 [ 610.215803] env[61273]: DEBUG nova.compute.manager [req-79dab9ec-5dcd-48de-a49c-57c6d10bdb81 req-cfdccffc-e3aa-4767-ac2a-6cef9a812147 service nova] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Received event network-vif-deleted-2a417537-62ce-4faf-b880-d5f49a9deb7f {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 610.232730] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.232953] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.314407] env[61273]: DEBUG nova.scheduler.client.report [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 610.316949] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg cd9095b58e314e03a01b2bfd3ff4b368 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 610.333474] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd9095b58e314e03a01b2bfd3ff4b368 [ 610.452952] env[61273]: DEBUG nova.network.neutron [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.453527] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg c045ac31c1e54c85ad83540af9033027 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 610.461253] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c045ac31c1e54c85ad83540af9033027 [ 610.823170] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.003s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.823480] env[61273]: ERROR nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac, please check neutron logs for more information. 
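Editor's note: the inventory dict logged just above is what the resource tracker reports to Placement for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb. Read with the usual Placement capacity formula, capacity = (total - reserved) * allocation_ratio, it yields 192 schedulable VCPU, 196078 MEMORY_MB and 400 DISK_GB. The short script below just re-derives those numbers from the logged values; the formula is the standard Placement interpretation of these fields rather than something quoted from this log.

    inventory = {   # subset of the fields from the "inventory data" log line above
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print("%s: %d schedulable units" % (rc, capacity))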
[ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Traceback (most recent call last): [ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self.driver.spawn(context, instance, image_meta, [ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] vm_ref = self.build_virtual_machine(instance, [ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] vif_infos = vmwarevif.get_vif_info(self._session, [ 610.823480] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] for vif in network_info: [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] return self._sync_wrapper(fn, *args, **kwargs) [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self.wait() [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self[:] = self._gt.wait() [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] return self._exit_event.wait() [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] result = hub.switch() [ 610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
610.823755] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] return self.greenlet.switch() [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] result = function(*args, **kwargs) [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] return func(*args, **kwargs) [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] raise e [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] nwinfo = self.network_api.allocate_for_instance( [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] created_port_ids = self._update_ports_for_instance( [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] with excutils.save_and_reraise_exception(): [ 610.824091] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] self.force_reraise() [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] raise self.value [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] updated_port = self._update_port( [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] _ensure_no_port_binding_failure(port) [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] raise exception.PortBindingFailed(port_id=port['id']) [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] nova.exception.PortBindingFailed: Binding failed for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac, please check neutron logs for more information. [ 610.824381] env[61273]: ERROR nova.compute.manager [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] [ 610.824637] env[61273]: DEBUG nova.compute.utils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Binding failed for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 610.825338] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.314s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.827339] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 9f7a2859e122473799b8031dc4251593 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 610.831850] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Build of instance 3f1f549f-8034-4685-b6f0-db5a7a2a4a32 was re-scheduled: Binding failed for port 2c9b8cd4-1c30-438e-a736-fd3acb6e41ac, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 610.831850] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 610.831850] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Acquiring lock "refresh_cache-3f1f549f-8034-4685-b6f0-db5a7a2a4a32" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.831850] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Acquired lock "refresh_cache-3f1f549f-8034-4685-b6f0-db5a7a2a4a32" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.832045] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 610.832045] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 91c8cc3f191a4129a480ffd8147229a0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 610.845303] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91c8cc3f191a4129a480ffd8147229a0 [ 610.888362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f7a2859e122473799b8031dc4251593 [ 610.962890] env[61273]: INFO nova.compute.manager [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050] Took 1.03 seconds to deallocate network for instance. [ 610.964772] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg d16fadae1a594be59e7008d01e4aaf4b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 611.034697] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d16fadae1a594be59e7008d01e4aaf4b [ 611.363317] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 611.479704] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg b1b60935422c48bc8fe2d64cafda34ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 611.483726] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.484356] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg e42fac5934c64f41ba2fb5ccab5542cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 611.495763] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e42fac5934c64f41ba2fb5ccab5542cc [ 611.523061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1b60935422c48bc8fe2d64cafda34ba [ 611.710688] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b64308-f1b5-45ba-9661-e24de6bb2215 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.720600] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61903af8-6aa2-45c6-878f-a7c20ce88c99 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.754735] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901f05cf-d88d-4bc5-8413-8514921b4031 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.763099] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adabdc7d-84f7-413f-9576-c80564b8f496 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.777041] env[61273]: DEBUG nova.compute.provider_tree [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.777546] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 69d326ea767248538e4c3448579973dd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 611.786510] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69d326ea767248538e4c3448579973dd [ 611.990705] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] 
Releasing lock "refresh_cache-3f1f549f-8034-4685-b6f0-db5a7a2a4a32" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.990980] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 611.991120] env[61273]: DEBUG nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 611.991295] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 612.007374] env[61273]: INFO nova.scheduler.client.report [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Deleted allocations for instance 8f37dfb3-c4d2-4c41-91eb-e3c7ca640050 [ 612.013500] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 965af2490e6f49e19fb81ab49b101b0c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 612.026488] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 612.027084] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 79072e6660494c6ba28a800a66483180 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 612.029638] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 965af2490e6f49e19fb81ab49b101b0c [ 612.041411] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79072e6660494c6ba28a800a66483180 [ 612.280051] env[61273]: DEBUG nova.scheduler.client.report [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.282472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 4c06b78132e843ddb5efe25274406eb6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 612.296243] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c06b78132e843ddb5efe25274406eb6 [ 612.519263] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e015aab-3e7e-485b-abca-dc5626d0be89 tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "8f37dfb3-c4d2-4c41-91eb-e3c7ca640050" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.852s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.519957] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 81beb89dfe2242b3b0d93c2682f45117 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 612.529423] env[61273]: DEBUG nova.network.neutron [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.529965] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 14eecf2fe3cc4e2f9d2841a840dc8675 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 612.532760] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
81beb89dfe2242b3b0d93c2682f45117 [ 612.541027] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14eecf2fe3cc4e2f9d2841a840dc8675 [ 612.786590] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.960s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.786590] env[61273]: ERROR nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a2439eab-fa0c-4c49-9830-697e9a092f05, please check neutron logs for more information. [ 612.786590] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Traceback (most recent call last): [ 612.786590] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 612.786590] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self.driver.spawn(context, instance, image_meta, [ 612.786590] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 612.786590] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.786590] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.786590] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] vm_ref = self.build_virtual_machine(instance, [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] for vif in network_info: [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] return self._sync_wrapper(fn, *args, **kwargs) [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self.wait() [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 
36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self[:] = self._gt.wait() [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] return self._exit_event.wait() [ 612.786937] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] current.throw(*self._exc) [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] result = function(*args, **kwargs) [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] return func(*args, **kwargs) [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] raise e [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] nwinfo = self.network_api.allocate_for_instance( [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] created_port_ids = self._update_ports_for_instance( [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 612.787254] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] with excutils.save_and_reraise_exception(): [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] self.force_reraise() [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] raise self.value [ 612.787644] env[61273]: ERROR nova.compute.manager 
[instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] updated_port = self._update_port( [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] _ensure_no_port_binding_failure(port) [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] raise exception.PortBindingFailed(port_id=port['id']) [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] nova.exception.PortBindingFailed: Binding failed for port a2439eab-fa0c-4c49-9830-697e9a092f05, please check neutron logs for more information. [ 612.787644] env[61273]: ERROR nova.compute.manager [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] [ 612.787916] env[61273]: DEBUG nova.compute.utils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Binding failed for port a2439eab-fa0c-4c49-9830-697e9a092f05, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 612.788463] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.636s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.790725] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 7467338a4ec74ec691621a4cb0c55f0d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 612.792364] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Build of instance 36c3ac75-5bfd-4a89-9ddb-28fded8da39c was re-scheduled: Binding failed for port a2439eab-fa0c-4c49-9830-697e9a092f05, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 612.792764] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 612.792984] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Acquiring lock "refresh_cache-36c3ac75-5bfd-4a89-9ddb-28fded8da39c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.793131] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Acquired lock "refresh_cache-36c3ac75-5bfd-4a89-9ddb-28fded8da39c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.793287] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 612.793770] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 70770852ec874826976ae478f3e76b45 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 612.805223] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70770852ec874826976ae478f3e76b45 [ 612.843654] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7467338a4ec74ec691621a4cb0c55f0d [ 613.021983] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 613.023852] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 3c0e2b29f8b24a5bb48acbb13cbb39e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 613.034000] env[61273]: INFO nova.compute.manager [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] [instance: 3f1f549f-8034-4685-b6f0-db5a7a2a4a32] Took 1.04 seconds to deallocate network for instance. 
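Each of the tracebacks above ends in the same place: nova/network/neutron.py calls _ensure_no_port_binding_failure(port) and that helper raises nova.exception.PortBindingFailed for the port that Neutron could not bind. The following is a minimal, self-contained sketch of that kind of check, not Nova's actual implementation; the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions based on standard Neutron port attributes, and only the exception message and the port ID are taken from the log.

    # Hypothetical sketch of a port-binding check like the one in the tracebacks.
    # Assumption: Neutron marks a failed binding by setting the port's
    # 'binding:vif_type' attribute to 'binding_failed'.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        """Raise if Neutron reports the port's VIF binding as failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example with the port ID from the traceback above:
    port = {'id': '2c9b8cd4-1c30-438e-a736-fd3acb6e41ac',
            'binding:vif_type': 'binding_failed'}
    ensure_no_port_binding_failure(port)  # raises PortBindingFailed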
[ 613.035773] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg de9a84000db04683a2cccc301cdd4cfb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 613.076728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c0e2b29f8b24a5bb48acbb13cbb39e6 [ 613.085015] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de9a84000db04683a2cccc301cdd4cfb [ 613.317074] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.421923] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.422437] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 47640ca4dea04bbeabe707827152051b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 613.446016] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47640ca4dea04bbeabe707827152051b [ 613.545091] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 85fdf6d261ba49669d2f61f109843605 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 613.555157] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.585479] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85fdf6d261ba49669d2f61f109843605 [ 613.731789] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c8b08c-bd59-43a4-897b-c1465bbe1138 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.739813] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb40214c-ee82-4e83-8391-a77eb0550fe5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.783904] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55806970-b0a7-4834-b72b-5faea24ecbe0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
613.793724] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe2a23a-7007-4c16-a3c0-e4194529994b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.813327] env[61273]: DEBUG nova.compute.provider_tree [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.813970] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 4f4d53853c4443dea0053ccf9a767c86 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 613.825652] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f4d53853c4443dea0053ccf9a767c86 [ 613.925174] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Releasing lock "refresh_cache-36c3ac75-5bfd-4a89-9ddb-28fded8da39c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.925410] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 613.925592] env[61273]: DEBUG nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 613.925782] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 613.942355] env[61273]: DEBUG nova.network.neutron [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.943023] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 03285661d3cc4fe6a2a07b0a1b490b79 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 613.950764] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03285661d3cc4fe6a2a07b0a1b490b79 [ 614.076044] env[61273]: INFO nova.scheduler.client.report [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Deleted allocations for instance 3f1f549f-8034-4685-b6f0-db5a7a2a4a32 [ 614.083182] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Expecting reply to msg 128debdffc1c46088bcbc3b3e0f5b8ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 614.105371] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 128debdffc1c46088bcbc3b3e0f5b8ba [ 614.255991] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Acquiring lock "23774aa5-1608-495f-8015-29e25f856c69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.256484] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Lock "23774aa5-1608-495f-8015-29e25f856c69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.317923] env[61273]: DEBUG nova.scheduler.client.report [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.320457] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 90dfd49e323a46efbff0381d6fcce286 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 614.340573] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90dfd49e323a46efbff0381d6fcce286 [ 614.445548] env[61273]: DEBUG nova.network.neutron [None 
req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.446178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 74c2533029094e36865e0d5ebd7744f6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 614.455090] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74c2533029094e36865e0d5ebd7744f6 [ 614.587209] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ff4d2de5-6719-470e-b0fd-ff1700045390 tempest-ServerActionsTestOtherA-1252012706 tempest-ServerActionsTestOtherA-1252012706-project-member] Lock "3f1f549f-8034-4685-b6f0-db5a7a2a4a32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.424s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.588193] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 5c3778d7db9a4c08a5752caca394d751 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 614.643990] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c3778d7db9a4c08a5752caca394d751 [ 614.824764] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.035s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.824764] env[61273]: ERROR nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 319ef93a-07c4-43fa-885a-2600407961b4, please check neutron logs for more information. 
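The resource tracker keeps re-reporting the same inventory dict for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb (VCPU, MEMORY_MB, DISK_GB with their allocation ratios). As a worked example of what that data implies, the snippet below computes schedulable capacity with the usual Placement convention capacity = (total - reserved) * allocation_ratio; the formula is an assumption for illustration, only the numbers come from the log.

    # Effective capacity implied by the inventory logged above (assumed formula).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: {capacity} schedulable units")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400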
[ 614.824764] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Traceback (most recent call last): [ 614.824764] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 614.824764] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self.driver.spawn(context, instance, image_meta, [ 614.824764] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 614.824764] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 614.824764] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 614.824764] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] vm_ref = self.build_virtual_machine(instance, [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] vif_infos = vmwarevif.get_vif_info(self._session, [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] for vif in network_info: [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] return self._sync_wrapper(fn, *args, **kwargs) [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self.wait() [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self[:] = self._gt.wait() [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] return self._exit_event.wait() [ 614.825141] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] result = hub.switch() [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] return self.greenlet.switch() [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] result = function(*args, **kwargs) [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] return func(*args, **kwargs) [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] raise e [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] nwinfo = self.network_api.allocate_for_instance( [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 614.825485] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] created_port_ids = self._update_ports_for_instance( [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] with excutils.save_and_reraise_exception(): [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] self.force_reraise() [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] raise self.value [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] updated_port = self._update_port( [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] _ensure_no_port_binding_failure(port) [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 614.825849] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] raise exception.PortBindingFailed(port_id=port['id']) [ 614.826144] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] nova.exception.PortBindingFailed: Binding failed for port 319ef93a-07c4-43fa-885a-2600407961b4, please check neutron logs for more information. [ 614.826144] env[61273]: ERROR nova.compute.manager [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] [ 614.826144] env[61273]: DEBUG nova.compute.utils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Binding failed for port 319ef93a-07c4-43fa-885a-2600407961b4, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 614.828917] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.242s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.828917] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg b844436978e8460e9a16b717a8982a7d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 614.831288] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Build of instance 5cddeea1-7558-4c12-afdc-2ea7a706881a was re-scheduled: Binding failed for port 319ef93a-07c4-43fa-885a-2600407961b4, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 614.831926] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 614.832199] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "refresh_cache-5cddeea1-7558-4c12-afdc-2ea7a706881a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.833088] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquired lock "refresh_cache-5cddeea1-7558-4c12-afdc-2ea7a706881a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.833088] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 614.833088] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg f4802b3bcc7e45edbc676d9942781a46 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 614.844647] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4802b3bcc7e45edbc676d9942781a46 [ 614.876666] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b844436978e8460e9a16b717a8982a7d [ 614.889771] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Acquiring lock "b6a158f8-6e2a-4967-ad05-761804ec6590" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.889983] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Lock "b6a158f8-6e2a-4967-ad05-761804ec6590" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.948521] env[61273]: INFO nova.compute.manager [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] [instance: 36c3ac75-5bfd-4a89-9ddb-28fded8da39c] Took 1.02 seconds to deallocate network for instance. 
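After each re-scheduled build the log shows the same cleanup sequence: the VMware driver has no unplug_vifs method so the VIF unplug step is skipped, the network is deallocated, and the scheduler report client deletes the instance's allocations before the build lock is released. The sketch below only restates that order as read off the DEBUG/INFO lines; the function and attribute names are hypothetical stand-ins, not Nova's real API.

    # Hypothetical sketch of the cleanup order recorded in the log above.
    def cleanup_failed_build(driver, network_api, reportclient, context, instance):
        if not hasattr(driver, 'unplug_vifs'):
            # Mirrors: "Virt driver does not provide unplug_vifs method, so it
            # is not possible determine if VIFs should be unplugged."
            pass  # skip VIF unplug
        # Mirrors: "Deallocating network for instance" / "deallocate_for_instance()"
        network_api.deallocate_for_instance(context, instance)
        # Mirrors: "Deleted allocations for instance <uuid>"
        reportclient.delete_allocation_for_instance(context, instance.uuid)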
[ 614.951949] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 4fd9261ff18a42d69898f2f9c763d1a8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 614.991399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fd9261ff18a42d69898f2f9c763d1a8 [ 615.091526] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 615.093728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg eceab6fc2af64afd8b056950bda0f166 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 615.137121] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eceab6fc2af64afd8b056950bda0f166 [ 615.371500] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.455785] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg 158cd5cbd8564f93b8d2eef0613ad10a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 615.493746] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 158cd5cbd8564f93b8d2eef0613ad10a [ 615.520078] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.521161] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 14c980d5b473490c8e4a067057864f7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 615.532556] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14c980d5b473490c8e4a067057864f7a [ 615.616024] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.789383] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6792b14c-a930-4c02-ab63-a7c9ce8a09a8 {{(pid=61273) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.797315] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2fa09f-fb02-4965-a504-4c25c0b6f62d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.828079] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27187ab1-e151-468a-94ec-dd1ea7923999 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.837105] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b7b41d-3f9a-4e44-a2e3-ad2fab16a9ff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.852942] env[61273]: DEBUG nova.compute.provider_tree [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.853566] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 5b2e2477bafb46bb91740e6f33b4475c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 615.863407] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b2e2477bafb46bb91740e6f33b4475c [ 615.980330] env[61273]: INFO nova.scheduler.client.report [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Deleted allocations for instance 36c3ac75-5bfd-4a89-9ddb-28fded8da39c [ 615.986720] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Expecting reply to msg a969605f2b7a4946ac9e3adc358b47c2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 616.006095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a969605f2b7a4946ac9e3adc358b47c2 [ 616.023783] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Releasing lock "refresh_cache-5cddeea1-7558-4c12-afdc-2ea7a706881a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.024098] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 616.024290] env[61273]: DEBUG nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 616.024452] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 616.050678] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 616.051254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg d1fcfb5afbb04bccb9e822d3e50cce45 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 616.059067] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1fcfb5afbb04bccb9e822d3e50cce45 [ 616.355921] env[61273]: DEBUG nova.scheduler.client.report [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.358340] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 7ecb64b70a784c90ae941b7f839426e4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 616.374552] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ecb64b70a784c90ae941b7f839426e4 [ 616.489349] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2b100f97-892f-472e-a7d9-9ca8350c8021 tempest-ServersWithSpecificFlavorTestJSON-970394519 tempest-ServersWithSpecificFlavorTestJSON-970394519-project-member] Lock "36c3ac75-5bfd-4a89-9ddb-28fded8da39c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.381s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.489982] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 
tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 1eb3f353bfe64c7495035161c577add9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 616.501305] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1eb3f353bfe64c7495035161c577add9 [ 616.554061] env[61273]: DEBUG nova.network.neutron [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.554599] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 910ec0e4bc954883b4227ba2d79ab666 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 616.562945] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 910ec0e4bc954883b4227ba2d79ab666 [ 617.256161] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.430s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.256884] env[61273]: ERROR nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6c735bdb-9615-424c-b35d-b618b55a0ca8, please check neutron logs for more information. 
[ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Traceback (most recent call last): [ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self.driver.spawn(context, instance, image_meta, [ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] vm_ref = self.build_virtual_machine(instance, [ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.256884] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] for vif in network_info: [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] return self._sync_wrapper(fn, *args, **kwargs) [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self.wait() [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self[:] = self._gt.wait() [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] return self._exit_event.wait() [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] current.throw(*self._exc) [ 617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
617.257255] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] result = function(*args, **kwargs) [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] return func(*args, **kwargs) [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] raise e [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] nwinfo = self.network_api.allocate_for_instance( [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] created_port_ids = self._update_ports_for_instance( [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] with excutils.save_and_reraise_exception(): [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] self.force_reraise() [ 617.257727] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.258885] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] raise self.value [ 617.258885] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.258885] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] updated_port = self._update_port( [ 617.258885] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.258885] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] _ensure_no_port_binding_failure(port) [ 617.258885] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.258885] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] raise exception.PortBindingFailed(port_id=port['id']) [ 617.258885] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] nova.exception.PortBindingFailed: Binding failed for 
port 6c735bdb-9615-424c-b35d-b618b55a0ca8, please check neutron logs for more information. [ 617.258885] env[61273]: ERROR nova.compute.manager [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] [ 617.258885] env[61273]: DEBUG nova.compute.utils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Binding failed for port 6c735bdb-9615-424c-b35d-b618b55a0ca8, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 617.259967] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 617.261676] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 348fd8ed2b2140e9b6b3e2ffd6301a58 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 617.262657] env[61273]: INFO nova.compute.manager [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 5cddeea1-7558-4c12-afdc-2ea7a706881a] Took 1.24 seconds to deallocate network for instance. [ 617.264172] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg aa7467ba03db4e2d9b8d6ba6a397980e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 617.265260] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Build of instance 7f8b08d4-3535-48ab-ba3f-a159511e2a64 was re-scheduled: Binding failed for port 6c735bdb-9615-424c-b35d-b618b55a0ca8, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 617.265693] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 617.265905] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquiring lock "refresh_cache-7f8b08d4-3535-48ab-ba3f-a159511e2a64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.266040] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Acquired lock "refresh_cache-7f8b08d4-3535-48ab-ba3f-a159511e2a64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.266187] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 617.266532] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 89f63338e0f84ec68d78cce7366d661f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 617.267505] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.071s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.269142] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 52309d76bb5840f78aefeb2dde448ae7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 617.281978] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89f63338e0f84ec68d78cce7366d661f [ 617.297358] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 348fd8ed2b2140e9b6b3e2ffd6301a58 [ 617.328229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52309d76bb5840f78aefeb2dde448ae7 [ 617.331144] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa7467ba03db4e2d9b8d6ba6a397980e [ 617.779528] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 3bc1f6608fd4403fa728b98acac7ae24 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 617.801090] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.803920] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.866663] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bc1f6608fd4403fa728b98acac7ae24 [ 617.904911] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.905555] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg f7ac9edf6e25487e958dc6415b0a708a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 617.916135] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7ac9edf6e25487e958dc6415b0a708a [ 618.143265] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222668f0-a1a9-47e5-b6bf-20e745f45f84 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.151965] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3259440d-9433-4eb1-b119-a4c46ccc15db {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.186848] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a8bef9-7413-4c25-bd01-249d56f92930 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.195120] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99477c0d-b30b-4408-ab08-89bf5b925ec3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.209749] env[61273]: DEBUG nova.compute.provider_tree [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.210247] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to 
msg 989d12145f1845d4979e5c4e69d4f545 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 618.220481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 989d12145f1845d4979e5c4e69d4f545 [ 618.310396] env[61273]: INFO nova.scheduler.client.report [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Deleted allocations for instance 5cddeea1-7558-4c12-afdc-2ea7a706881a [ 618.317431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg e832015b600649228b0ddbbc29a80a29 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 618.329371] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e832015b600649228b0ddbbc29a80a29 [ 618.361754] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "ca8a38c7-a81c-407a-9558-3d15e492d9fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.362057] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "ca8a38c7-a81c-407a-9558-3d15e492d9fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.408627] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Releasing lock "refresh_cache-7f8b08d4-3535-48ab-ba3f-a159511e2a64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.408925] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 618.409116] env[61273]: DEBUG nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.409276] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 618.426685] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.427333] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg f65c9f63d96345f393d8f00252973b18 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 618.433828] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f65c9f63d96345f393d8f00252973b18 [ 618.713453] env[61273]: DEBUG nova.scheduler.client.report [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.715327] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 6e1f3c2874034ccab6888bbb0a922857 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 618.732035] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e1f3c2874034ccab6888bbb0a922857 [ 618.820033] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ae7e47f1-e831-45dc-b4b0-3acce34fffb3 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "5cddeea1-7558-4c12-afdc-2ea7a706881a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.816s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.820657] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 
tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg e68cea3566874823b88aa84fde7cccaa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 618.833449] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e68cea3566874823b88aa84fde7cccaa [ 618.929653] env[61273]: DEBUG nova.network.neutron [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.930163] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 17c29a56a7e249cfbac752d25e9c63ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 618.940741] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17c29a56a7e249cfbac752d25e9c63ba [ 619.051296] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "1efd9cfe-3a0c-412c-aa44-3bf650d08f9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.051296] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "1efd9cfe-3a0c-412c-aa44-3bf650d08f9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.226971] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.952s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.226971] env[61273]: ERROR nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8061659c-a94d-4f7d-a527-0f760ebc0807, please check neutron logs for more information. 
[ 619.226971] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Traceback (most recent call last): [ 619.226971] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 619.226971] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self.driver.spawn(context, instance, image_meta, [ 619.226971] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 619.226971] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self._vmops.spawn(context, instance, image_meta, injected_files, [ 619.226971] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 619.226971] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] vm_ref = self.build_virtual_machine(instance, [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] vif_infos = vmwarevif.get_vif_info(self._session, [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] for vif in network_info: [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] return self._sync_wrapper(fn, *args, **kwargs) [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self.wait() [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self[:] = self._gt.wait() [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] return self._exit_event.wait() [ 619.227326] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] result = hub.switch() [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] return self.greenlet.switch() [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] result = function(*args, **kwargs) [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] return func(*args, **kwargs) [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] raise e [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] nwinfo = self.network_api.allocate_for_instance( [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 619.227701] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] created_port_ids = self._update_ports_for_instance( [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] with excutils.save_and_reraise_exception(): [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] self.force_reraise() [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] raise self.value [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] updated_port = self._update_port( [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] _ensure_no_port_binding_failure(port) [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 619.228203] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] raise exception.PortBindingFailed(port_id=port['id']) [ 619.228621] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] nova.exception.PortBindingFailed: Binding failed for port 8061659c-a94d-4f7d-a527-0f760ebc0807, please check neutron logs for more information. [ 619.228621] env[61273]: ERROR nova.compute.manager [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] [ 619.228621] env[61273]: DEBUG nova.compute.utils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Binding failed for port 8061659c-a94d-4f7d-a527-0f760ebc0807, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 619.228621] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.081s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.228621] env[61273]: INFO nova.compute.claims [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.228621] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg fcf4fff80d2c4ee48737a58298e3ab7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 619.228806] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Build of instance 7d1f7566-8e5e-476c-9d19-49ed7b16c308 was re-scheduled: Binding failed for port 8061659c-a94d-4f7d-a527-0f760ebc0807, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 619.229343] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 619.229694] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Acquiring lock "refresh_cache-7d1f7566-8e5e-476c-9d19-49ed7b16c308" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.229960] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Acquired lock "refresh_cache-7d1f7566-8e5e-476c-9d19-49ed7b16c308" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.230239] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 619.230809] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 22b1140095fc4f23a13bfbdaaab4e74b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 619.240117] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22b1140095fc4f23a13bfbdaaab4e74b [ 619.265639] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcf4fff80d2c4ee48737a58298e3ab7a [ 619.322389] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 619.324927] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg d41a3e47351743e48183037c5e46eed3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 619.365103] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d41a3e47351743e48183037c5e46eed3 [ 619.432708] env[61273]: INFO nova.compute.manager [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] [instance: 7f8b08d4-3535-48ab-ba3f-a159511e2a64] Took 1.02 seconds to deallocate network for instance. 
[ 619.434397] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 4ee0d4fb15cb49588ad7dc9551c99ab1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 619.476298] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ee0d4fb15cb49588ad7dc9551c99ab1 [ 619.735294] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg ace00942e2984eadb57e35598457f1b9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 619.743875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ace00942e2984eadb57e35598457f1b9 [ 619.763938] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 619.832757] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.833330] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 06f90ddc8d5d4a4d82f5876f833ffbcd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 619.853192] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06f90ddc8d5d4a4d82f5876f833ffbcd [ 619.862910] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.939150] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 29e0b2afdd08444f967be0a062cac443 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 619.975937] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29e0b2afdd08444f967be0a062cac443 [ 620.283298] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "799d4a06-f7a3-4b92-8e96-ac076848fdd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.283690] env[61273]: DEBUG oslo_concurrency.lockutils [None 
req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "799d4a06-f7a3-4b92-8e96-ac076848fdd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.343138] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Releasing lock "refresh_cache-7d1f7566-8e5e-476c-9d19-49ed7b16c308" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.343515] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 620.343795] env[61273]: DEBUG nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 620.344089] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 620.370541] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 620.371254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg c54eaea6f8d540189fa7c5225942f29d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 620.384340] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c54eaea6f8d540189fa7c5225942f29d [ 620.462182] env[61273]: INFO nova.scheduler.client.report [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Deleted allocations for instance 7f8b08d4-3535-48ab-ba3f-a159511e2a64 [ 620.468565] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Expecting reply to msg 86d5e6eae6104d3b8570fd07d5ce9445 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 620.485130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86d5e6eae6104d3b8570fd07d5ce9445 [ 620.693777] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f363ec-0424-425c-b407-216f79dd248f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.706897] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd52756-4f60-4716-94a1-4f0ea97b669c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.744952] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b584d5e4-06be-4a3e-8a9c-4f6ae37efaf0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.753474] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dd61f5-64dd-43bb-9138-2fc0ff68bf28 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.770327] env[61273]: DEBUG nova.compute.provider_tree [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.771214] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 3191425449e94f69a80394ac6d50371b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 620.780660] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3191425449e94f69a80394ac6d50371b [ 620.873909] env[61273]: DEBUG nova.network.neutron [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 620.874972] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg dec5edcab714445d9bfe08703c92d3f1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 620.883805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dec5edcab714445d9bfe08703c92d3f1 [ 620.971516] env[61273]: DEBUG oslo_concurrency.lockutils [None req-57518784-2066-4fc9-af51-753b31e55ef6 tempest-ServerRescueNegativeTestJSON-1657136755 tempest-ServerRescueNegativeTestJSON-1657136755-project-member] Lock "7f8b08d4-3535-48ab-ba3f-a159511e2a64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.801s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.973476] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 39eaa4cd84574c91a4c0e7f7d03925d8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 620.994728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39eaa4cd84574c91a4c0e7f7d03925d8 [ 621.274974] env[61273]: DEBUG nova.scheduler.client.report [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 621.279052] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 5cffce46aa6c4bedb80ab7ad658d257d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 621.297010] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cffce46aa6c4bedb80ab7ad658d257d [ 621.377859] env[61273]: INFO nova.compute.manager [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] [instance: 7d1f7566-8e5e-476c-9d19-49ed7b16c308] Took 1.03 seconds to deallocate network for instance. 
[ 621.380441] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 78780c4fe1274848a8942f6dd1483859 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 621.416610] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78780c4fe1274848a8942f6dd1483859 [ 621.478979] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 621.482167] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg f637ace64cb24ee68cfd1e09396e6138 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 621.537799] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f637ace64cb24ee68cfd1e09396e6138 [ 621.720309] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.720604] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.721330] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 0c79f43d3bb44339829874994228ae77 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 621.735485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c79f43d3bb44339829874994228ae77 [ 621.789498] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.567s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.790110] env[61273]: DEBUG nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 621.792216] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg f6681cacd7a74610947d7590c9664d73 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 621.793354] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.291s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.806157] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 8eb538427c9d4daf9f5737d67585fc52 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 621.839506] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6681cacd7a74610947d7590c9664d73 [ 621.852742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8eb538427c9d4daf9f5737d67585fc52 [ 621.885429] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg eb9c96ec3e774cc4926b56eef8940695 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 621.940839] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb9c96ec3e774cc4926b56eef8940695 [ 622.020806] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.227758] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.227758] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Starting heal instance info cache {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 622.227758] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Rebuilding the list of instances to heal {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 622.227758] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 6768324d48224e40a4fa3b6f4fdda992 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 622.241483] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6768324d48224e40a4fa3b6f4fdda992 [ 622.310592] env[61273]: DEBUG nova.compute.utils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 
tempest-DeleteServersTestJSON-485078453-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 622.311211] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg d59228bcbead4b2b996938e7a239842b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 622.315232] env[61273]: DEBUG nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 622.315390] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 622.322688] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d59228bcbead4b2b996938e7a239842b [ 622.363025] env[61273]: DEBUG nova.policy [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3078a2af81b248f8b100f58ee66a5a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c06b50a7aaa742afbbd0c6fc56c3d131', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 622.411440] env[61273]: INFO nova.scheduler.client.report [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Deleted allocations for instance 7d1f7566-8e5e-476c-9d19-49ed7b16c308 [ 622.417169] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Expecting reply to msg 424c3b7ba3cb465b84ca788f051bad6d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 622.430572] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 424c3b7ba3cb465b84ca788f051bad6d [ 622.721829] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556f936b-5e93-4334-8d0f-b952e187b5c4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.733290] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Skipping network cache update for instance because it is Building. 
{{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 622.733461] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 622.733594] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Didn't find any instances for network info cache update. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 622.734221] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.734800] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a59ad1f-c11f-471e-ad65-d72027e0b993 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.738071] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.738248] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.738780] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.738944] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.739093] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.739222] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61273) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 622.739364] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager.update_available_resource {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 622.740407] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 4ff1bd84f7944fdfaf74e2653ed2ea00 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 622.764477] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ff1bd84f7944fdfaf74e2653ed2ea00 [ 622.765392] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cbbb19-5e93-47e6-a6cd-f9c7c386fccf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.774507] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317f5cac-357f-4118-83c9-5ae1f3ab8d5d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.788211] env[61273]: DEBUG nova.compute.provider_tree [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.788742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg b472c72edf4443949bd8372d13e5c9d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 622.806332] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b472c72edf4443949bd8372d13e5c9d2 [ 622.809716] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Successfully created port: f50cebee-32bc-48a5-94cd-9978e48c02f5 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.816484] env[61273]: DEBUG nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 622.819113] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 02170ff7140f409fbf0fe2fbd92003ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 622.863295] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02170ff7140f409fbf0fe2fbd92003ce [ 622.919412] env[61273]: DEBUG oslo_concurrency.lockutils [None req-59065584-7b1c-496c-a06f-36029ba615b1 tempest-ServerDiagnosticsTest-1786720603 tempest-ServerDiagnosticsTest-1786720603-project-member] Lock "7d1f7566-8e5e-476c-9d19-49ed7b16c308" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.662s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.920402] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 3f358fb3bdd44f5498e8540e12076b45 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 622.933275] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f358fb3bdd44f5498e8540e12076b45 [ 623.243006] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.298456] env[61273]: DEBUG nova.scheduler.client.report [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.298456] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg e8dbb5c51dd9494982e34dc47437a073 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 623.310353] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8dbb5c51dd9494982e34dc47437a073 [ 623.330267] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 2038278aab404283aff0fdf6f5f16bb6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 623.370104] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2038278aab404283aff0fdf6f5f16bb6 [ 623.426412] env[61273]: DEBUG nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 
tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 623.428370] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 121ce9190b6b45a8a11fa0e1c246b633 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 623.476396] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 121ce9190b6b45a8a11fa0e1c246b633 [ 623.801486] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.008s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.802155] env[61273]: ERROR nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port eca6b818-5b64-4a66-a817-2488dc457863, please check neutron logs for more information. [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Traceback (most recent call last): [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self.driver.spawn(context, instance, image_meta, [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] vm_ref = self.build_virtual_machine(instance, [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] vif_infos = vmwarevif.get_vif_info(self._session, [ 623.802155] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] for vif in network_info: [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 
1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] return self._sync_wrapper(fn, *args, **kwargs) [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self.wait() [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self[:] = self._gt.wait() [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] return self._exit_event.wait() [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] result = hub.switch() [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 623.802504] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] return self.greenlet.switch() [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] result = function(*args, **kwargs) [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] return func(*args, **kwargs) [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] raise e [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] nwinfo = self.network_api.allocate_for_instance( [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] created_port_ids = self._update_ports_for_instance( [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 
1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] with excutils.save_and_reraise_exception(): [ 623.802838] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] self.force_reraise() [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] raise self.value [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] updated_port = self._update_port( [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] _ensure_no_port_binding_failure(port) [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] raise exception.PortBindingFailed(port_id=port['id']) [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] nova.exception.PortBindingFailed: Binding failed for port eca6b818-5b64-4a66-a817-2488dc457863, please check neutron logs for more information. [ 623.803144] env[61273]: ERROR nova.compute.manager [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] [ 623.803401] env[61273]: DEBUG nova.compute.utils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Binding failed for port eca6b818-5b64-4a66-a817-2488dc457863, please check neutron logs for more information. 
{{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 623.804193] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.179s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.805398] env[61273]: INFO nova.compute.claims [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.807599] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 1ad76b748b514bca94af394cb3c302f6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 623.809037] env[61273]: DEBUG nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Build of instance 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7 was re-scheduled: Binding failed for port eca6b818-5b64-4a66-a817-2488dc457863, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 623.810145] env[61273]: DEBUG nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 623.810145] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "refresh_cache-1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.810145] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquired lock "refresh_cache-1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.810145] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 623.810393] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 7fd8f29a1ab24e77ae35eff1a2b4a95f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 623.818271] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fd8f29a1ab24e77ae35eff1a2b4a95f [ 623.835634] env[61273]: DEBUG nova.compute.manager [None 
req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 623.878717] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 623.878996] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 623.879171] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.879360] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 623.880035] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.880035] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 623.880035] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 623.880169] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 
tempest-DeleteServersTestJSON-485078453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 623.881078] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 623.881078] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 623.881078] env[61273]: DEBUG nova.virt.hardware [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.884035] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27dd18df-db77-483e-8d8f-a3bcde141e3c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.884513] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ad76b748b514bca94af394cb3c302f6 [ 623.890983] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c1888d-7b59-4b09-97c1-6f91599ecee8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.954085] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.318219] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg e482724631a94f2ab0541627ec04d700 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 624.321753] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e482724631a94f2ab0541627ec04d700 [ 624.342196] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.479299] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.479299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 6cb50beb964f462e9c075d0138d93d2f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 624.486986] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cb50beb964f462e9c075d0138d93d2f [ 624.628134] env[61273]: DEBUG nova.compute.manager [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Received event network-changed-f50cebee-32bc-48a5-94cd-9978e48c02f5 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 624.628134] env[61273]: DEBUG nova.compute.manager [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Refreshing instance network info cache due to event network-changed-f50cebee-32bc-48a5-94cd-9978e48c02f5. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 624.628134] env[61273]: DEBUG oslo_concurrency.lockutils [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] Acquiring lock "refresh_cache-017b1da4-7c9b-477d-92a3-29b2248317d3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.628134] env[61273]: DEBUG oslo_concurrency.lockutils [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] Acquired lock "refresh_cache-017b1da4-7c9b-477d-92a3-29b2248317d3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.628134] env[61273]: DEBUG nova.network.neutron [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Refreshing network info cache for port f50cebee-32bc-48a5-94cd-9978e48c02f5 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 624.628350] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] Expecting reply to msg ebbe329014d448f6a66e00f06904049d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 624.631927] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebbe329014d448f6a66e00f06904049d [ 624.876168] env[61273]: ERROR nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f50cebee-32bc-48a5-94cd-9978e48c02f5, please check neutron logs for more information. 
[ 624.876168] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 624.876168] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 624.876168] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 624.876168] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 624.876168] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 624.876168] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 624.876168] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 624.876168] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 624.876168] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 624.876168] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 624.876168] env[61273]: ERROR nova.compute.manager raise self.value [ 624.876168] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 624.876168] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 624.876168] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 624.876168] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 624.876634] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 624.876634] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 624.876634] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f50cebee-32bc-48a5-94cd-9978e48c02f5, please check neutron logs for more information. 
[ 624.876634] env[61273]: ERROR nova.compute.manager [ 624.876634] env[61273]: Traceback (most recent call last): [ 624.876634] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 624.876634] env[61273]: listener.cb(fileno) [ 624.876634] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 624.876634] env[61273]: result = function(*args, **kwargs) [ 624.876634] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 624.876634] env[61273]: return func(*args, **kwargs) [ 624.876634] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 624.876634] env[61273]: raise e [ 624.876634] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 624.876634] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 624.876634] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 624.876634] env[61273]: created_port_ids = self._update_ports_for_instance( [ 624.876634] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 624.876634] env[61273]: with excutils.save_and_reraise_exception(): [ 624.876634] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 624.876634] env[61273]: self.force_reraise() [ 624.876634] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 624.876634] env[61273]: raise self.value [ 624.876634] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 624.876634] env[61273]: updated_port = self._update_port( [ 624.876634] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 624.876634] env[61273]: _ensure_no_port_binding_failure(port) [ 624.876634] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 624.876634] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 624.877713] env[61273]: nova.exception.PortBindingFailed: Binding failed for port f50cebee-32bc-48a5-94cd-9978e48c02f5, please check neutron logs for more information. [ 624.877713] env[61273]: Removing descriptor: 15 [ 624.877713] env[61273]: ERROR nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f50cebee-32bc-48a5-94cd-9978e48c02f5, please check neutron logs for more information. 
[ 624.877713] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Traceback (most recent call last): [ 624.877713] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 624.877713] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] yield resources [ 624.877713] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 624.877713] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self.driver.spawn(context, instance, image_meta, [ 624.877713] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 624.877713] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 624.877713] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 624.877713] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] vm_ref = self.build_virtual_machine(instance, [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] vif_infos = vmwarevif.get_vif_info(self._session, [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] for vif in network_info: [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] return self._sync_wrapper(fn, *args, **kwargs) [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self.wait() [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self[:] = self._gt.wait() [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] return self._exit_event.wait() [ 624.878055] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 624.878401] env[61273]: ERROR 
nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] result = hub.switch() [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] return self.greenlet.switch() [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] result = function(*args, **kwargs) [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] return func(*args, **kwargs) [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] raise e [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] nwinfo = self.network_api.allocate_for_instance( [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 624.878401] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] created_port_ids = self._update_ports_for_instance( [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] with excutils.save_and_reraise_exception(): [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self.force_reraise() [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] raise self.value [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] updated_port = self._update_port( [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 624.878708] 
env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] _ensure_no_port_binding_failure(port) [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 624.878708] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] raise exception.PortBindingFailed(port_id=port['id']) [ 624.879012] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] nova.exception.PortBindingFailed: Binding failed for port f50cebee-32bc-48a5-94cd-9978e48c02f5, please check neutron logs for more information. [ 624.879012] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] [ 624.879012] env[61273]: INFO nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Terminating instance [ 624.879949] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-017b1da4-7c9b-477d-92a3-29b2248317d3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.981319] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Releasing lock "refresh_cache-1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.981593] env[61273]: DEBUG nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 624.981774] env[61273]: DEBUG nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 624.981938] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 625.016754] env[61273]: DEBUG nova.network.neutron [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.016754] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg fcbf1dfdc8b540a599a825a8b4deb56d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 625.023947] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcbf1dfdc8b540a599a825a8b4deb56d [ 625.141439] env[61273]: DEBUG nova.network.neutron [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.215348] env[61273]: DEBUG nova.network.neutron [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.215884] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] Expecting reply to msg 9068dd77e6b741acad1773a1c0f7813c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 625.231181] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9068dd77e6b741acad1773a1c0f7813c [ 625.250336] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35082f29-00fc-4915-87ca-5a9d0d904a40 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.258510] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caebcd3d-238e-4d8b-803e-07c0c8979950 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.292254] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9817a5fa-6093-474d-8931-b161b66ab277 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.300822] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f144f7d-7af5-45b5-8469-241ddebbb62f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.315036] env[61273]: DEBUG nova.compute.provider_tree [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.315552] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 1d95e9ad0c594fb7a24f5a96d40a755f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 625.323449] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d95e9ad0c594fb7a24f5a96d40a755f [ 625.519313] env[61273]: DEBUG nova.network.neutron [None 
req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.519960] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 2c723d6e99874c2e97948b83a306fc19 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 625.538012] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c723d6e99874c2e97948b83a306fc19 [ 625.724937] env[61273]: DEBUG oslo_concurrency.lockutils [req-ce8a28c4-468f-4158-b145-fb055ac2cf10 req-0a9dd937-4a0a-4665-8f2e-13befc8db52e service nova] Releasing lock "refresh_cache-017b1da4-7c9b-477d-92a3-29b2248317d3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.725388] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-017b1da4-7c9b-477d-92a3-29b2248317d3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.725579] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 625.726044] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 3523742448954df6a61ddd7ea1a11c89 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 625.734782] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3523742448954df6a61ddd7ea1a11c89 [ 625.820038] env[61273]: DEBUG nova.scheduler.client.report [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 625.822537] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 1e35b9d649504bd29d577e59dc30b505 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 625.838505] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e35b9d649504bd29d577e59dc30b505 [ 626.022440] env[61273]: INFO nova.compute.manager [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 
tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7] Took 1.04 seconds to deallocate network for instance. [ 626.024195] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 730f6ac5ec53456b97342e6ac632454a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 626.064167] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 730f6ac5ec53456b97342e6ac632454a [ 626.255060] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 626.325377] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.325897] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 626.327529] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg c4c1c14e2e6c469597c3d6760350f8d6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 626.328917] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.682s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.341304] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 04303de49c374834a5811c8cc56ae630 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 626.343401] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.343890] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg e2a79591a6894ad091f963cddc152f0f in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 626.372451] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2a79591a6894ad091f963cddc152f0f [ 626.373876] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4c1c14e2e6c469597c3d6760350f8d6 [ 626.385164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04303de49c374834a5811c8cc56ae630 [ 626.529397] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 6e828ae1b03e4bacbf41816eea1df494 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 626.569217] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e828ae1b03e4bacbf41816eea1df494 [ 626.793106] env[61273]: DEBUG nova.compute.manager [req-a3ff4fdf-09f4-4622-a52b-d04cb9066b5a req-44179447-27cf-4ec3-9b85-48753c30a544 service nova] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Received event network-vif-deleted-f50cebee-32bc-48a5-94cd-9978e48c02f5 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 626.844032] env[61273]: DEBUG nova.compute.utils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 626.844711] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg ee3c006f8fa44ae0b1d2c2b7b4029e85 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 626.845736] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 626.845911] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 626.851130] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-017b1da4-7c9b-477d-92a3-29b2248317d3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.851375] env[61273]: DEBUG nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 626.851577] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 626.853130] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce5cbc3f-e9fc-4523-a3dd-537289004e96 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.860109] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee3c006f8fa44ae0b1d2c2b7b4029e85 [ 626.863684] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2f6ab0-9cbc-4f7c-8a1a-b05cd5afbd3d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.894716] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 017b1da4-7c9b-477d-92a3-29b2248317d3 could not be found. [ 626.894937] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 626.895108] env[61273]: INFO nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 626.895342] env[61273]: DEBUG oslo.service.loopingcall [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 626.895567] env[61273]: DEBUG nova.compute.manager [-] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 626.895654] env[61273]: DEBUG nova.network.neutron [-] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 626.909367] env[61273]: DEBUG nova.policy [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '181d2abbc63840d3b1e6b51ec2c6ee0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '49494417788d472c93cc089b87e577dc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 626.958825] env[61273]: DEBUG nova.network.neutron [-] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 626.959586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 79e1fd7f072d48eba716839dda075c57 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 626.967547] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79e1fd7f072d48eba716839dda075c57 [ 627.052787] env[61273]: INFO nova.scheduler.client.report [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Deleted allocations for instance 1d84bcbd-9b55-4d6f-b6c7-24391c8600a7 [ 627.072554] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 1a87c096bd68438186fa48aff2e1ee18 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 627.088793] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a87c096bd68438186fa48aff2e1ee18 [ 627.296416] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Successfully created port: 35e64d9a-ac71-4a0f-abb8-c800619dff5a {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.334038] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a590ba-7c4d-423f-8cf5-970f46a4fc61 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.344236] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68531bf7-bfc6-45f4-83eb-7ce07a0a8f22 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.372327] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec 
tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 627.374797] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg d4a348e865ef4eed884a591e472130df in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 627.380033] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923e729c-f6e0-458f-b227-85df24e53aca {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.385088] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b92783-1fd1-4976-93c5-64a33221bb19 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.409829] env[61273]: DEBUG nova.compute.provider_tree [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.410347] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 0e032dd9ed9742e782b1c31d1b940674 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 627.411485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4a348e865ef4eed884a591e472130df [ 627.417646] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e032dd9ed9742e782b1c31d1b940674 [ 627.461961] env[61273]: DEBUG nova.network.neutron [-] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.462482] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f5227f56171240f7825c14ec787fac37 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 627.470989] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5227f56171240f7825c14ec787fac37 [ 627.579286] env[61273]: DEBUG oslo_concurrency.lockutils [None req-15bbf0be-5de5-4231-b593-9e16a99cd222 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "1d84bcbd-9b55-4d6f-b6c7-24391c8600a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 80.636s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.579286] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 8bae55517b514cca9fb9a059af42d1fa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 627.592121] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
8bae55517b514cca9fb9a059af42d1fa [ 627.880037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 3455b34dbb634e238b763b4faca73f0f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 627.913101] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3455b34dbb634e238b763b4faca73f0f [ 627.914162] env[61273]: DEBUG nova.scheduler.client.report [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 627.916481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 856efed8d21b424784ee9090766a455b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 627.930890] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 856efed8d21b424784ee9090766a455b [ 627.964949] env[61273]: INFO nova.compute.manager [-] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Took 1.07 seconds to deallocate network for instance. [ 627.967689] env[61273]: DEBUG nova.compute.claims [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 627.967964] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.080620] env[61273]: DEBUG nova.compute.manager [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 628.082493] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg dc73e60f118a4c4c856f46fb2677b431 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 628.119289] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc73e60f118a4c4c856f46fb2677b431 [ 628.312272] env[61273]: ERROR nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a, please check neutron logs for more information. [ 628.312272] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 628.312272] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.312272] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 628.312272] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.312272] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 628.312272] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.312272] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 628.312272] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.312272] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 628.312272] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.312272] env[61273]: ERROR nova.compute.manager raise self.value [ 628.312272] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.312272] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 628.312272] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.312272] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 628.312767] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.312767] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 628.312767] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a, please check neutron logs for more information. 
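The force_reraise() and raise self.value frames that repeat through the PortBindingFailed tracebacks above come from oslo.utils' save_and_reraise_exception context manager: cleanup code runs inside the with block, and the saved exception is re-raised when the block exits. A minimal standalone sketch of that pattern, assuming oslo.utils is installed; apart from save_and_reraise_exception itself, every name below is a made-up stand-in, not Nova's code:

from oslo_utils import excutils

def update_port(port_id):
    # Stand-in for the neutron port update that fails in the log above.
    raise RuntimeError(f"binding failed for port {port_id}")

def update_ports(port_ids):
    for port_id in port_ids:
        try:
            update_port(port_id)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; leaving the block re-raises the saved
                # exception, which is where force_reraise() / raise self.value
                # show up in the tracebacks.
                print(f"rolling back port {port_id}")

try:
    update_ports(["f50cebee-32bc-48a5-94cd-9978e48c02f5"])
except RuntimeError as exc:
    print(f"re-raised as expected: {exc}")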
[ 628.312767] env[61273]: ERROR nova.compute.manager [ 628.312767] env[61273]: Traceback (most recent call last): [ 628.312767] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 628.312767] env[61273]: listener.cb(fileno) [ 628.312767] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.312767] env[61273]: result = function(*args, **kwargs) [ 628.312767] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.312767] env[61273]: return func(*args, **kwargs) [ 628.312767] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.312767] env[61273]: raise e [ 628.312767] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.312767] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 628.312767] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.312767] env[61273]: created_port_ids = self._update_ports_for_instance( [ 628.312767] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.312767] env[61273]: with excutils.save_and_reraise_exception(): [ 628.312767] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.312767] env[61273]: self.force_reraise() [ 628.312767] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.312767] env[61273]: raise self.value [ 628.312767] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.312767] env[61273]: updated_port = self._update_port( [ 628.312767] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.312767] env[61273]: _ensure_no_port_binding_failure(port) [ 628.312767] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.312767] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 628.313488] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a, please check neutron logs for more information. [ 628.313488] env[61273]: Removing descriptor: 15 [ 628.384467] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 628.404963] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 628.405202] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 628.405357] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.405537] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 628.405682] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.405829] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 628.406074] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 628.406181] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 628.406343] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 
tempest-ServerTagsTestJSON-148504296-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 628.406500] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 628.406667] env[61273]: DEBUG nova.virt.hardware [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 628.407539] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d98c239-b4c1-4dac-aff3-728f5b56257a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.428621] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.092s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.428621] env[61273]: ERROR nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2a417537-62ce-4faf-b880-d5f49a9deb7f, please check neutron logs for more information. 
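For scale, the inventory snapshots logged above for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB 196590 with 512 reserved, DISK_GB 400) imply the schedulable capacity computed below, assuming the usual placement convention capacity = (total - reserved) * allocation_ratio, which these logs do not state explicitly:

# Quick arithmetic check against the inventory data logged above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")  # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400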
[ 628.428621] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Traceback (most recent call last): [ 628.428621] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.428621] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self.driver.spawn(context, instance, image_meta, [ 628.428621] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 628.428621] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.428621] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.428621] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] vm_ref = self.build_virtual_machine(instance, [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] for vif in network_info: [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] return self._sync_wrapper(fn, *args, **kwargs) [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self.wait() [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self[:] = self._gt.wait() [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] return self._exit_event.wait() [ 628.428865] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] result = hub.switch() [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] return self.greenlet.switch() [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] result = function(*args, **kwargs) [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] return func(*args, **kwargs) [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] raise e [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] nwinfo = self.network_api.allocate_for_instance( [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.429191] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] created_port_ids = self._update_ports_for_instance( [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] with excutils.save_and_reraise_exception(): [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] self.force_reraise() [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] raise self.value [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] updated_port = self._update_port( [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] _ensure_no_port_binding_failure(port) [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 628.429550] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] raise exception.PortBindingFailed(port_id=port['id']) [ 628.429851] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] nova.exception.PortBindingFailed: Binding failed for port 2a417537-62ce-4faf-b880-d5f49a9deb7f, please check neutron logs for more information. [ 628.429851] env[61273]: ERROR nova.compute.manager [instance: 1804f229-97b9-4ee3-933d-715431a900f8] [ 628.429851] env[61273]: DEBUG nova.compute.utils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Binding failed for port 2a417537-62ce-4faf-b880-d5f49a9deb7f, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 628.429851] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.877s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.429851] env[61273]: INFO nova.compute.claims [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.429851] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg a87345724ecb4a34be2cd8884d9448a9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 628.430026] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8e4736-a970-40f0-9226-aed86ae2a679 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.433586] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Build of instance 1804f229-97b9-4ee3-933d-715431a900f8 was re-scheduled: Binding failed for port 2a417537-62ce-4faf-b880-d5f49a9deb7f, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 628.434040] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 628.434261] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Acquiring lock "refresh_cache-1804f229-97b9-4ee3-933d-715431a900f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.434401] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Acquired lock "refresh_cache-1804f229-97b9-4ee3-933d-715431a900f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.434556] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.434935] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 466e1b088af54342af81080854ae476b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 628.460295] env[61273]: ERROR nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a, please check neutron logs for more information. 
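The Acquiring/Acquired/Releasing triplets around the "refresh_cache-*" names above are oslo.concurrency's named-lock context manager at work; a minimal sketch of the same call pattern, with the instance UUID copied from the log and everything else illustrative (assumes oslo.concurrency is installed):

from oslo_concurrency import lockutils

INSTANCE_UUID = "1804f229-97b9-4ee3-933d-715431a900f8"  # from the log above

def refresh_network_info_cache(instance_uuid):
    # Callers that use the same lock name serialize; different names run in
    # parallel, mirroring the per-instance refresh_cache locks seen above.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        return []  # placeholder for rebuilding the (empty) network info cache

print(refresh_network_info_cache(INSTANCE_UUID))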
[ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Traceback (most recent call last): [ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] yield resources [ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self.driver.spawn(context, instance, image_meta, [ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] vm_ref = self.build_virtual_machine(instance, [ 628.460295] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] for vif in network_info: [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] return self._sync_wrapper(fn, *args, **kwargs) [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self.wait() [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self[:] = self._gt.wait() [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] return self._exit_event.wait() [ 628.460795] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 628.460795] env[61273]: ERROR 
nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] current.throw(*self._exc) [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] result = function(*args, **kwargs) [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] return func(*args, **kwargs) [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] raise e [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] nwinfo = self.network_api.allocate_for_instance( [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] created_port_ids = self._update_ports_for_instance( [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] with excutils.save_and_reraise_exception(): [ 628.461140] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self.force_reraise() [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] raise self.value [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] updated_port = self._update_port( [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] _ensure_no_port_binding_failure(port) [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] raise exception.PortBindingFailed(port_id=port['id']) [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] nova.exception.PortBindingFailed: Binding failed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a, please check neutron logs for more information. [ 628.461510] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] [ 628.461510] env[61273]: INFO nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Terminating instance [ 628.462063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 466e1b088af54342af81080854ae476b [ 628.462789] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Acquiring lock "refresh_cache-32b57d1b-d35f-488e-be23-9119f2f56562" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.463031] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Acquired lock "refresh_cache-32b57d1b-d35f-488e-be23-9119f2f56562" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.463132] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.463550] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 3003aa9361d64bd0afa7649f801d069b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 628.467156] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a87345724ecb4a34be2cd8884d9448a9 [ 628.475879] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3003aa9361d64bd0afa7649f801d069b [ 628.604103] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.938446] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 1dc1572680854341bf442bdd0afb98d6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 628.948269] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dc1572680854341bf442bdd0afb98d6 [ 628.962753] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 
tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.007662] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.050106] env[61273]: DEBUG nova.compute.manager [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Received event network-changed-35e64d9a-ac71-4a0f-abb8-c800619dff5a {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 629.050308] env[61273]: DEBUG nova.compute.manager [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Refreshing instance network info cache due to event network-changed-35e64d9a-ac71-4a0f-abb8-c800619dff5a. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 629.050495] env[61273]: DEBUG oslo_concurrency.lockutils [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] Acquiring lock "refresh_cache-32b57d1b-d35f-488e-be23-9119f2f56562" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.092522] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.093124] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 2b0f2dfe6a4944bf85b40ba8bee94786 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 629.102899] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b0f2dfe6a4944bf85b40ba8bee94786 [ 629.143584] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.144346] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg ba37dc9158f4407aa9adc63b2ce0561c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 629.158905] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba37dc9158f4407aa9adc63b2ce0561c [ 629.172761] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b 
tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Acquiring lock "21213cff-55b3-48fd-91b4-6718f7819bc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.173080] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Lock "21213cff-55b3-48fd-91b4-6718f7819bc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.469484] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "f3df4816-ef02-4ecc-a8ca-4f0eaf286218" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.469767] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "f3df4816-ef02-4ecc-a8ca-4f0eaf286218" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.595523] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Releasing lock "refresh_cache-1804f229-97b9-4ee3-933d-715431a900f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.595786] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 629.595938] env[61273]: DEBUG nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 629.596120] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 629.612199] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.612951] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg f2caed5d6df54124b0f1bb1d17071814 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 629.621236] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2caed5d6df54124b0f1bb1d17071814 [ 629.646873] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Releasing lock "refresh_cache-32b57d1b-d35f-488e-be23-9119f2f56562" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.647383] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 629.647700] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 629.648269] env[61273]: DEBUG oslo_concurrency.lockutils [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] Acquired lock "refresh_cache-32b57d1b-d35f-488e-be23-9119f2f56562" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.648577] env[61273]: DEBUG nova.network.neutron [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Refreshing network info cache for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 629.649258] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] Expecting reply to msg 10dd9a4171d9493499cd85d892575111 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 629.650137] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-419ecda1-0ff6-4ce4-9ad4-7365f5f6bf33 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.656899] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10dd9a4171d9493499cd85d892575111 [ 629.660886] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c03206-2028-48de-87f5-babf00c92e42 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.686094] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 32b57d1b-d35f-488e-be23-9119f2f56562 could not be found. [ 629.686457] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 629.686748] env[61273]: INFO nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Took 0.04 seconds to destroy the instance on the hypervisor. [ 629.687087] env[61273]: DEBUG oslo.service.loopingcall [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 629.690766] env[61273]: DEBUG nova.compute.manager [-] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 629.690868] env[61273]: DEBUG nova.network.neutron [-] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 629.714005] env[61273]: DEBUG nova.network.neutron [-] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.714677] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg af9e42f642834056beb6b4179fbf735c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 629.722687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af9e42f642834056beb6b4179fbf735c [ 629.800556] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da0149a-9ec0-4aab-b180-105e10e97a48 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.807623] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5b3966-d924-4125-98bd-b1ad1eb9a1bf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.836139] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef82888a-fa65-4001-b9be-5c67dd0ea7bc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.843057] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262bc794-6699-4e26-a722-80df7a209e77 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.857451] env[61273]: DEBUG nova.compute.provider_tree [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.858067] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 62e68e18e4274e08972cdcd49b5c00a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 629.865758] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62e68e18e4274e08972cdcd49b5c00a4 [ 630.115397] env[61273]: DEBUG nova.network.neutron [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.115961] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 
tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 480d859c5b7a4a72a1f6541d3b685155 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 630.125383] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 480d859c5b7a4a72a1f6541d3b685155 [ 630.177653] env[61273]: DEBUG nova.network.neutron [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.217659] env[61273]: DEBUG nova.network.neutron [-] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.218146] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 76473198294c4bfb8e41d915d942e49f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 630.236422] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76473198294c4bfb8e41d915d942e49f [ 630.293794] env[61273]: DEBUG nova.network.neutron [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.294299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] Expecting reply to msg dbe545ca9c494552afe33b4cd0dc210a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 630.302111] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbe545ca9c494552afe33b4cd0dc210a [ 630.360525] env[61273]: DEBUG nova.scheduler.client.report [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 630.363054] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg e6cf3213617b45eb9645c5a2211f6575 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 630.376465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6cf3213617b45eb9645c5a2211f6575 [ 630.618421] env[61273]: INFO nova.compute.manager [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] [instance: 1804f229-97b9-4ee3-933d-715431a900f8] Took 1.02 seconds to deallocate network for instance. 
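The traceback recorded above shows where the build for instance 32b57d1b-d35f-488e-be23-9119f2f56562 actually dies: after Neutron returns the updated port, nova/network/neutron.py calls _ensure_no_port_binding_failure(port) and raises PortBindingFailed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a. A minimal, self-contained sketch of that check follows; the exception class and message are taken from the log, while the test on the port's 'binding:vif_type' field (value 'binding_failed') is an assumption about how Neutron reports a failed binding, not a copy of Nova's code.

# Hedged sketch of the binding-failure check seen in the traceback above.
# Assumption: a failed binding is reported via binding:vif_type == 'binding_failed'.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron-reported value


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs for '
            'more information.' % port_id)
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    # Raise if Neutron could not bind the port to a host.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    failed_port = {'id': '35e64d9a-ac71-4a0f-abb8-c800619dff5a',
                   'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)  # mirrors the message recorded in the log
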
[ 630.620250] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 3e8d8d304be145e097d05214dba84d9f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 630.659884] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e8d8d304be145e097d05214dba84d9f [ 630.725877] env[61273]: INFO nova.compute.manager [-] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Took 1.03 seconds to deallocate network for instance. [ 630.729839] env[61273]: DEBUG nova.compute.claims [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 630.729839] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.796244] env[61273]: DEBUG oslo_concurrency.lockutils [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] Releasing lock "refresh_cache-32b57d1b-d35f-488e-be23-9119f2f56562" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.796552] env[61273]: DEBUG nova.compute.manager [req-2428a181-500f-4323-9111-34394d5bb869 req-5c9b6410-10ad-459e-93f2-22fbb01b1557 service nova] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Received event network-vif-deleted-35e64d9a-ac71-4a0f-abb8-c800619dff5a {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 630.866430] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.866430] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 630.867896] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 3f5a3526da88452ba51fdbfc723a74b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 630.868992] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.253s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.871392] env[61273]: INFO nova.compute.claims [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 630.873626] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 145fcd96e49645bea1b2b3b98873aa46 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 630.899273] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f5a3526da88452ba51fdbfc723a74b4 [ 630.913062] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 145fcd96e49645bea1b2b3b98873aa46 [ 631.126479] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg bfc7b97df3ed4533bd6a2fe55ebe0cd7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 631.158022] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfc7b97df3ed4533bd6a2fe55ebe0cd7 [ 631.375735] env[61273]: DEBUG nova.compute.utils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.376451] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 9b9acc3dd80d4b488ea41a82ff41c2f0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 631.378652] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 9d1e52ce66f9421985234e5ddf1faedf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 631.380293] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 631.380466] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 631.387499] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d1e52ce66f9421985234e5ddf1faedf [ 631.388580] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b9acc3dd80d4b488ea41a82ff41c2f0 [ 631.450441] env[61273]: DEBUG nova.policy [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c66923fec264dcf9cf04011a1463650', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b11f5c3d754416d81b53d294f3f9631', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 631.654373] env[61273]: INFO nova.scheduler.client.report [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Deleted allocations for instance 1804f229-97b9-4ee3-933d-715431a900f8 [ 631.660721] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Expecting reply to msg 8b18ec849e1f488ea5283a332bcd9a5c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 631.673394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b18ec849e1f488ea5283a332bcd9a5c [ 631.793819] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Successfully created port: b6e7038c-dfd6-439e-846f-4aba6ca1318b {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.884155] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 631.885813] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 5bf4f91f349946f7aa0941e139788818 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 631.941182] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bf4f91f349946f7aa0941e139788818 [ 632.162540] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c40970f8-1d21-4690-85fe-1ed8a0091295 tempest-ServerMetadataNegativeTestJSON-932953273 tempest-ServerMetadataNegativeTestJSON-932953273-project-member] Lock "1804f229-97b9-4ee3-933d-715431a900f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.722s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.163170] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg c98cd757da414b23a6e46da2b36d99f2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 632.175818] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c98cd757da414b23a6e46da2b36d99f2 [ 632.330252] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4877fcea-a7d0-4f1f-81b0-fb3e6885b644 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.342963] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fbaa67-a1b3-4bfe-af91-af376e15f04b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.379080] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49a477c-fbf2-4d92-afa0-827177c6a931 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.387578] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922e7d6a-f02d-45b5-af5c-d81d27f09d9f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.393696] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg fa02c3a65d4a4db2afa20d5b5b452ba4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 632.410512] env[61273]: DEBUG nova.compute.provider_tree [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.411027] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg e354285cb63747bf9cfdd076d59c2622 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 632.418513] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e354285cb63747bf9cfdd076d59c2622 [ 632.428505] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa02c3a65d4a4db2afa20d5b5b452ba4 [ 632.666288] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 632.668148] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg a85a24294ea84ef0a2006cb2d14d16ad in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 632.716063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a85a24294ea84ef0a2006cb2d14d16ad [ 632.897146] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 632.917578] env[61273]: DEBUG nova.scheduler.client.report [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 632.919917] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 3614b70ff09f4caa9d6cd887fdd3b906 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 632.925966] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 632.926206] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 632.926368] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.926547] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 632.926693] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.926877] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 632.927097] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 632.927256] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 632.927419] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 632.927577] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 632.927749] env[61273]: DEBUG nova.virt.hardware [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.928765] env[61273]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c1826c-4617-458c-9a7b-982bc04223fe {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.936978] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3614b70ff09f4caa9d6cd887fdd3b906 [ 632.938380] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed6d0c1-aebe-4921-b50a-691091300fbe {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.198259] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.302480] env[61273]: ERROR nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b6e7038c-dfd6-439e-846f-4aba6ca1318b, please check neutron logs for more information. [ 633.302480] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 633.302480] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.302480] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 633.302480] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.302480] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 633.302480] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.302480] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 633.302480] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.302480] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 633.302480] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.302480] env[61273]: ERROR nova.compute.manager raise self.value [ 633.302480] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.302480] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 633.302480] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.302480] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 633.302958] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.302958] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 633.302958] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b6e7038c-dfd6-439e-846f-4aba6ca1318b, please check 
neutron logs for more information. [ 633.302958] env[61273]: ERROR nova.compute.manager [ 633.302958] env[61273]: Traceback (most recent call last): [ 633.302958] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 633.302958] env[61273]: listener.cb(fileno) [ 633.302958] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.302958] env[61273]: result = function(*args, **kwargs) [ 633.302958] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.302958] env[61273]: return func(*args, **kwargs) [ 633.302958] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.302958] env[61273]: raise e [ 633.302958] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.302958] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 633.302958] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.302958] env[61273]: created_port_ids = self._update_ports_for_instance( [ 633.302958] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.302958] env[61273]: with excutils.save_and_reraise_exception(): [ 633.302958] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.302958] env[61273]: self.force_reraise() [ 633.302958] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.302958] env[61273]: raise self.value [ 633.302958] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.302958] env[61273]: updated_port = self._update_port( [ 633.302958] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.302958] env[61273]: _ensure_no_port_binding_failure(port) [ 633.302958] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.302958] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 633.303813] env[61273]: nova.exception.PortBindingFailed: Binding failed for port b6e7038c-dfd6-439e-846f-4aba6ca1318b, please check neutron logs for more information. [ 633.303813] env[61273]: Removing descriptor: 19 [ 633.303813] env[61273]: ERROR nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b6e7038c-dfd6-439e-846f-4aba6ca1318b, please check neutron logs for more information. 
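Both port failures in this run follow the same shape: the allocation runs in a background eventlet greenthread (_allocate_network_async), the PortBindingFailed is stored there, and it only resurfaces later when the spawn path iterates the deferred network_info and its wait() re-raises the stored exception, which is why each failure produces two near-identical tracebacks. Below is a minimal sketch of that deferral pattern, assuming only the plain eventlet API (eventlet.spawn / GreenThread.wait); the DeferredNetworkInfo class and function names are illustrative stand-ins, not Nova's actual NetworkInfoAsyncWrapper.

# Hedged sketch of the deferred-exception pattern visible in the tracebacks:
# the error happens in a background greenthread and re-surfaces on wait().
# Assumption: plain eventlet API; class and function names are illustrative.

import eventlet


class PortBindingFailed(Exception):
    pass


def allocate_network_async(port_id):
    # Stand-in for the background allocation that fails in the log.
    raise PortBindingFailed(
        'Binding failed for port %s, please check neutron logs for more '
        'information.' % port_id)


class DeferredNetworkInfo:
    """Illustrative stand-in for a lazily resolved network_info object."""

    def __init__(self, port_id):
        self._gt = eventlet.spawn(allocate_network_async, port_id)

    def wait(self):
        # GreenThread.wait() re-raises whatever the greenthread raised,
        # so the consumer's traceback ends in PortBindingFailed as well.
        return self._gt.wait()


if __name__ == '__main__':
    nw_info = DeferredNetworkInfo('b6e7038c-dfd6-439e-846f-4aba6ca1318b')
    try:
        nw_info.wait()
    except PortBindingFailed as exc:
        print('caught on wait():', exc)
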
[ 633.303813] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Traceback (most recent call last): [ 633.303813] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 633.303813] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] yield resources [ 633.303813] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 633.303813] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self.driver.spawn(context, instance, image_meta, [ 633.303813] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 633.303813] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.303813] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.303813] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] vm_ref = self.build_virtual_machine(instance, [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] for vif in network_info: [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] return self._sync_wrapper(fn, *args, **kwargs) [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self.wait() [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self[:] = self._gt.wait() [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] return self._exit_event.wait() [ 633.304178] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.304553] env[61273]: ERROR 
nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] result = hub.switch() [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] return self.greenlet.switch() [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] result = function(*args, **kwargs) [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] return func(*args, **kwargs) [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] raise e [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] nwinfo = self.network_api.allocate_for_instance( [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.304553] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] created_port_ids = self._update_ports_for_instance( [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] with excutils.save_and_reraise_exception(): [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self.force_reraise() [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] raise self.value [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] updated_port = self._update_port( [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.304927] 
env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] _ensure_no_port_binding_failure(port) [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.304927] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] raise exception.PortBindingFailed(port_id=port['id']) [ 633.305305] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] nova.exception.PortBindingFailed: Binding failed for port b6e7038c-dfd6-439e-846f-4aba6ca1318b, please check neutron logs for more information. [ 633.305305] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] [ 633.305305] env[61273]: INFO nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Terminating instance [ 633.305880] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "refresh_cache-d4d3db12-8de6-4daf-a087-89bb043d1217" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.305880] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquired lock "refresh_cache-d4d3db12-8de6-4daf-a087-89bb043d1217" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.306031] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 633.306451] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 0c0503ab570840e0be59429b8c84e007 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 633.313711] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c0503ab570840e0be59429b8c84e007 [ 633.356434] env[61273]: DEBUG nova.compute.manager [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Received event network-changed-b6e7038c-dfd6-439e-846f-4aba6ca1318b {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 633.357120] env[61273]: DEBUG nova.compute.manager [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Refreshing instance network info cache due to event network-changed-b6e7038c-dfd6-439e-846f-4aba6ca1318b. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 633.357120] env[61273]: DEBUG oslo_concurrency.lockutils [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] Acquiring lock "refresh_cache-d4d3db12-8de6-4daf-a087-89bb043d1217" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.434271] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.434395] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 633.436264] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 00b5e59a084d4d4e8cd48932062e873a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 633.437686] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.634s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.439186] env[61273]: INFO nova.compute.claims [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 633.440768] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg eca4c8847f924d6cb11e97aefb1eb3b9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 633.490225] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00b5e59a084d4d4e8cd48932062e873a [ 633.493658] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eca4c8847f924d6cb11e97aefb1eb3b9 [ 633.853143] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.876181] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Acquiring lock "2a7d4872-4ed7-4058-bc36-b199d89a9f14" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.876408] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Lock "2a7d4872-4ed7-4058-bc36-b199d89a9f14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.944874] env[61273]: DEBUG nova.compute.utils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 633.945515] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg cbe20d5ce6404df19d0651deea015b3a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 633.948267] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg f7faf5ebb2d8454ca1fab87af99261dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 633.957162] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.957602] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 44e546f6e70241dc9a314ac250160dec in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 633.959062] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbe20d5ce6404df19d0651deea015b3a [ 633.959959] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7faf5ebb2d8454ca1fab87af99261dc [ 633.960141] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 633.960317] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 633.965497] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44e546f6e70241dc9a314ac250160dec [ 634.045127] env[61273]: DEBUG nova.policy [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0e2ea2c33c641b6a0a8114ae60f9bfc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6c3b64a422f4697a7bcd3a33a414842', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 634.389725] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Successfully created port: c0184233-b80c-46b2-a4e8-f4582c2edc01 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.448266] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 634.450218] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 62ac61185d4e4988acae9ed5a7edd3b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 634.459691] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Releasing lock "refresh_cache-d4d3db12-8de6-4daf-a087-89bb043d1217" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.460128] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 634.460322] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 634.460896] env[61273]: DEBUG oslo_concurrency.lockutils [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] Acquired lock "refresh_cache-d4d3db12-8de6-4daf-a087-89bb043d1217" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.461069] env[61273]: DEBUG nova.network.neutron [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Refreshing network info cache for port b6e7038c-dfd6-439e-846f-4aba6ca1318b {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 634.461451] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] Expecting reply to msg a11f5be94522416890d0d070b2a69f3c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 634.462285] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4914ea90-67bc-4aec-bb9a-9d328f29b074 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.469377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a11f5be94522416890d0d070b2a69f3c [ 634.475168] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada8923d-c98c-4ebe-865e-e40a5f9fb236 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.503187] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62ac61185d4e4988acae9ed5a7edd3b2 [ 634.509954] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d4d3db12-8de6-4daf-a087-89bb043d1217 could not be found. [ 634.510251] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 634.510574] env[61273]: INFO nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 634.510831] env[61273]: DEBUG oslo.service.loopingcall [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 634.511040] env[61273]: DEBUG nova.compute.manager [-] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 634.511131] env[61273]: DEBUG nova.network.neutron [-] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 634.529512] env[61273]: DEBUG nova.network.neutron [-] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.529512] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 20704730790249249e21f280ab73dfa5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 634.539381] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20704730790249249e21f280ab73dfa5 [ 634.878864] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89688316-6c17-4660-8b32-270e7ed29142 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.888258] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cb69a6-e8e5-466d-9c72-9ef22f5ad67d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.918110] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd327263-cf8d-4037-9b6c-a2b4be047ba7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.925282] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cad15d-3ef5-4b8d-b1d7-4af59b836255 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.938288] env[61273]: DEBUG nova.compute.provider_tree [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.938791] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 3750e7e7ced44b1b85ece5640e891d34 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 634.950730] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3750e7e7ced44b1b85ece5640e891d34 [ 634.954827] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 
tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 0cbe256ee009411684c8188b1fbfc8be in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 634.984465] env[61273]: DEBUG nova.network.neutron [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.986608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cbe256ee009411684c8188b1fbfc8be [ 635.035797] env[61273]: DEBUG nova.network.neutron [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.036446] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] Expecting reply to msg 238edd4d27964ccf86796cd74e39961b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 635.037247] env[61273]: DEBUG nova.network.neutron [-] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.037559] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ff5724d9584d499ea65250c2a5b2d2ef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 635.047010] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 238edd4d27964ccf86796cd74e39961b [ 635.060957] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff5724d9584d499ea65250c2a5b2d2ef [ 635.446388] env[61273]: DEBUG nova.scheduler.client.report [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 635.448856] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 4a16de19b2974533ba19ed41b1f02304 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 635.459791] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 635.463368] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a16de19b2974533ba19ed41b1f02304 [ 635.466552] env[61273]: DEBUG nova.compute.manager [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Received event network-vif-deleted-b6e7038c-dfd6-439e-846f-4aba6ca1318b {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 635.466659] env[61273]: DEBUG nova.compute.manager [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Received event network-changed-c0184233-b80c-46b2-a4e8-f4582c2edc01 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 635.466825] env[61273]: DEBUG nova.compute.manager [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Refreshing instance network info cache due to event network-changed-c0184233-b80c-46b2-a4e8-f4582c2edc01. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 635.467061] env[61273]: DEBUG oslo_concurrency.lockutils [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] Acquiring lock "refresh_cache-13c1d417-4087-46ad-b513-fc3317995d18" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.467504] env[61273]: DEBUG oslo_concurrency.lockutils [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] Acquired lock "refresh_cache-13c1d417-4087-46ad-b513-fc3317995d18" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.467504] env[61273]: DEBUG nova.network.neutron [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Refreshing network info cache for port c0184233-b80c-46b2-a4e8-f4582c2edc01 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.468055] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] Expecting reply to msg 2d4d68fa57664a0e8c4792c205a86ac5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 635.474290] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d4d68fa57664a0e8c4792c205a86ac5 [ 635.483557] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 635.483801] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 635.483994] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 635.484204] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 635.484356] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 635.484521] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 635.484743] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 635.484903] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 635.485065] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 635.485227] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 635.485397] env[61273]: DEBUG nova.virt.hardware [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 
tempest-ServersTestJSON-1053663491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 635.486786] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e175c83d-7575-4c33-a38e-b9563a9efe62 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.495268] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e293e4e-41d9-43a5-b08e-ab8d661cd879 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.509574] env[61273]: ERROR nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c0184233-b80c-46b2-a4e8-f4582c2edc01, please check neutron logs for more information. [ 635.509574] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 635.509574] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.509574] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 635.509574] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.509574] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 635.509574] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.509574] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 635.509574] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.509574] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 635.509574] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.509574] env[61273]: ERROR nova.compute.manager raise self.value [ 635.509574] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.509574] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 635.509574] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.509574] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 635.510251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.510251] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 635.510251] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c0184233-b80c-46b2-a4e8-f4582c2edc01, please check neutron logs for more information. 
[ 635.510251] env[61273]: ERROR nova.compute.manager [ 635.510251] env[61273]: Traceback (most recent call last): [ 635.510251] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 635.510251] env[61273]: listener.cb(fileno) [ 635.510251] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.510251] env[61273]: result = function(*args, **kwargs) [ 635.510251] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.510251] env[61273]: return func(*args, **kwargs) [ 635.510251] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.510251] env[61273]: raise e [ 635.510251] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.510251] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 635.510251] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.510251] env[61273]: created_port_ids = self._update_ports_for_instance( [ 635.510251] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.510251] env[61273]: with excutils.save_and_reraise_exception(): [ 635.510251] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.510251] env[61273]: self.force_reraise() [ 635.510251] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.510251] env[61273]: raise self.value [ 635.510251] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.510251] env[61273]: updated_port = self._update_port( [ 635.510251] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.510251] env[61273]: _ensure_no_port_binding_failure(port) [ 635.510251] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.510251] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 635.511416] env[61273]: nova.exception.PortBindingFailed: Binding failed for port c0184233-b80c-46b2-a4e8-f4582c2edc01, please check neutron logs for more information. [ 635.511416] env[61273]: Removing descriptor: 19 [ 635.511416] env[61273]: ERROR nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c0184233-b80c-46b2-a4e8-f4582c2edc01, please check neutron logs for more information. 
[ 635.511416] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Traceback (most recent call last): [ 635.511416] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 635.511416] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] yield resources [ 635.511416] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 635.511416] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self.driver.spawn(context, instance, image_meta, [ 635.511416] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 635.511416] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.511416] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.511416] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] vm_ref = self.build_virtual_machine(instance, [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] for vif in network_info: [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] return self._sync_wrapper(fn, *args, **kwargs) [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self.wait() [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self[:] = self._gt.wait() [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] return self._exit_event.wait() [ 635.511942] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.512483] env[61273]: ERROR 
nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] result = hub.switch() [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] return self.greenlet.switch() [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] result = function(*args, **kwargs) [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] return func(*args, **kwargs) [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] raise e [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] nwinfo = self.network_api.allocate_for_instance( [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.512483] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] created_port_ids = self._update_ports_for_instance( [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] with excutils.save_and_reraise_exception(): [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self.force_reraise() [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] raise self.value [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] updated_port = self._update_port( [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.513103] 
env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] _ensure_no_port_binding_failure(port) [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.513103] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] raise exception.PortBindingFailed(port_id=port['id']) [ 635.513603] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] nova.exception.PortBindingFailed: Binding failed for port c0184233-b80c-46b2-a4e8-f4582c2edc01, please check neutron logs for more information. [ 635.513603] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] [ 635.513603] env[61273]: INFO nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Terminating instance [ 635.513603] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Acquiring lock "refresh_cache-13c1d417-4087-46ad-b513-fc3317995d18" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.541386] env[61273]: DEBUG oslo_concurrency.lockutils [req-8abf710b-12a9-4d6e-9b34-b7fa6dd282b9 req-4940e46d-b23c-4b9f-90fd-2392072df2ca service nova] Releasing lock "refresh_cache-d4d3db12-8de6-4daf-a087-89bb043d1217" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.541948] env[61273]: INFO nova.compute.manager [-] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Took 1.03 seconds to deallocate network for instance. [ 635.544419] env[61273]: DEBUG nova.compute.claims [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 635.544590] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.951395] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.952043] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 635.953839] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 31eaca74662640a4a037e56f26de8a6e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 635.954930] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.096s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.956404] env[61273]: INFO nova.compute.claims [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.957877] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 772fc13eeb35482181f8b7d2f2f88646 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 635.989315] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31eaca74662640a4a037e56f26de8a6e [ 635.993963] env[61273]: DEBUG nova.network.neutron [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.999744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 772fc13eeb35482181f8b7d2f2f88646 [ 636.083483] env[61273]: DEBUG nova.network.neutron [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.084041] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] Expecting reply to msg f2ee9c5976fe4eb380b7c6f06388f47d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 636.092434] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2ee9c5976fe4eb380b7c6f06388f47d [ 636.463340] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 824b3aa220254838b7befde92074bc48 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 636.465561] env[61273]: DEBUG nova.compute.utils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 636.466157] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 525e30b1c3834e8f91efbe1a76b31675 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 636.467129] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 636.467298] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 636.473216] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 824b3aa220254838b7befde92074bc48 [ 636.475271] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 525e30b1c3834e8f91efbe1a76b31675 [ 636.524163] env[61273]: DEBUG nova.policy [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '61307ba65ba9474ebf19305ed6fb735d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a626302e1fb47f282f9cff6b397713c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 636.586566] env[61273]: DEBUG oslo_concurrency.lockutils [req-49709936-1d5c-483a-9033-6068314d279c req-238e15a1-2ee1-45de-a45d-a90989416e4c service nova] Releasing lock "refresh_cache-13c1d417-4087-46ad-b513-fc3317995d18" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.586990] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Acquired lock "refresh_cache-13c1d417-4087-46ad-b513-fc3317995d18" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.587176] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 636.587615] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 5750f6287f93464fb96ef343b73e34fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 636.595945] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5750f6287f93464fb96ef343b73e34fb [ 636.849251] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Successfully created port: 90378eb2-b59a-48b8-a577-33399d56d5c9 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.971106] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 
tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 636.972840] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg da158881179c469aa8e3af3b0ca5de7e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 637.029141] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da158881179c469aa8e3af3b0ca5de7e [ 637.113747] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.190621] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.191163] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 8b19ee6981a14f889613342b1b235aa5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 637.199619] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b19ee6981a14f889613342b1b235aa5 [ 637.328651] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dbac7e-3b0c-47e9-8b22-9984bb206a6a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.336327] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1828ba53-6854-4df8-a65d-3d8be9dc21cf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.365026] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da5ba52-99e4-40e6-a51d-e62879e6bdea {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.371847] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8408787a-708a-48f3-8b8c-60c08ac55899 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.387815] env[61273]: DEBUG nova.compute.provider_tree [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.389621] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 
tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 949ce1300a934f04afc65a1ef3c78a4b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 637.396840] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 949ce1300a934f04afc65a1ef3c78a4b [ 637.484274] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg dad93a4143b94035b47e0b04a5cbadd0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 637.502159] env[61273]: DEBUG nova.compute.manager [req-6bc614db-ffba-4ec2-a633-025b8203a095 req-6c9704ae-2a1f-4407-af15-2c3859bf8cae service nova] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Received event network-vif-deleted-c0184233-b80c-46b2-a4e8-f4582c2edc01 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 637.514006] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dad93a4143b94035b47e0b04a5cbadd0 [ 637.699952] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Releasing lock "refresh_cache-13c1d417-4087-46ad-b513-fc3317995d18" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.700415] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 637.700610] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 637.700922] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e7b1ac1-28c3-497d-9174-008f98c26408 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.709661] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d40e9af-7f33-434d-b208-931f36677091 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.733617] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 13c1d417-4087-46ad-b513-fc3317995d18 could not be found. 
[ 637.733882] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 637.734136] env[61273]: INFO nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Took 0.03 seconds to destroy the instance on the hypervisor. [ 637.734439] env[61273]: DEBUG oslo.service.loopingcall [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 637.734824] env[61273]: DEBUG nova.compute.manager [-] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 637.734986] env[61273]: DEBUG nova.network.neutron [-] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 637.751178] env[61273]: DEBUG nova.network.neutron [-] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.751178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c54a7a7525814ffbb1ae31c43845b560 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 637.757327] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c54a7a7525814ffbb1ae31c43845b560 [ 637.890721] env[61273]: DEBUG nova.scheduler.client.report [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 637.893847] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 2fc878888d274b2687d0c92b9634248c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 637.898251] env[61273]: ERROR nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 90378eb2-b59a-48b8-a577-33399d56d5c9, please check neutron logs for more information. 
[ 637.898251] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 637.898251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.898251] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 637.898251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 637.898251] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 637.898251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 637.898251] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 637.898251] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.898251] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 637.898251] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.898251] env[61273]: ERROR nova.compute.manager raise self.value [ 637.898251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 637.898251] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 637.898251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 637.898251] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 637.899050] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 637.899050] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 637.899050] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 90378eb2-b59a-48b8-a577-33399d56d5c9, please check neutron logs for more information. 
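Both tracebacks around this point end in the same place: Neutron hands back a port whose binding failed, _ensure_no_port_binding_failure converts that into PortBindingFailed, and oslo's save_and_reraise_exception lets cleanup run before re-raising the original exception, which is why the identical error text appears repeatedly downstream. A reduced sketch of that shape; the 'binding_failed' vif_type check and the delete_port cleanup are assumptions for illustration, not copied from the source:

# Reduced sketch of the failure path seen in the traceback; not the real Nova code.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Assumption: Neutron flags an unbindable port with vif_type 'binding_failed'.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


def update_ports_for_instance(ports, update_port, delete_port):
    created_port_ids = []
    try:
        for port in ports:
            updated = update_port(port)
            _ensure_no_port_binding_failure(updated)
            created_port_ids.append(updated["id"])
    except Exception:
        # Cleanup runs here, then the original exception is re-raised unchanged,
        # so the same PortBindingFailed surfaces again in the spawn path.
        with excutils.save_and_reraise_exception():
            for port_id in created_port_ids:
                delete_port(port_id)
    return created_port_ids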
[ 637.899050] env[61273]: ERROR nova.compute.manager [ 637.899050] env[61273]: Traceback (most recent call last): [ 637.899050] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 637.899050] env[61273]: listener.cb(fileno) [ 637.899050] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 637.899050] env[61273]: result = function(*args, **kwargs) [ 637.899050] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 637.899050] env[61273]: return func(*args, **kwargs) [ 637.899050] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 637.899050] env[61273]: raise e [ 637.899050] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.899050] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 637.899050] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 637.899050] env[61273]: created_port_ids = self._update_ports_for_instance( [ 637.899050] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 637.899050] env[61273]: with excutils.save_and_reraise_exception(): [ 637.899050] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.899050] env[61273]: self.force_reraise() [ 637.899050] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.899050] env[61273]: raise self.value [ 637.899050] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 637.899050] env[61273]: updated_port = self._update_port( [ 637.899050] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 637.899050] env[61273]: _ensure_no_port_binding_failure(port) [ 637.899050] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 637.899050] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 637.900040] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 90378eb2-b59a-48b8-a577-33399d56d5c9, please check neutron logs for more information. [ 637.900040] env[61273]: Removing descriptor: 19 [ 637.908558] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fc878888d274b2687d0c92b9634248c [ 637.998382] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 638.024760] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 638.025188] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 638.025358] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.025542] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 638.025685] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.025826] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 638.026031] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 638.026187] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 638.026411] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 638.026610] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 638.026790] env[61273]: DEBUG nova.virt.hardware [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 638.027930] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fab3da1-4925-42db-810b-608c6c4b0b45 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.035756] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daaea969-97a3-48a8-94af-589a161d651c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.049966] env[61273]: ERROR nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 90378eb2-b59a-48b8-a577-33399d56d5c9, please check neutron logs for more information. 
[ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Traceback (most recent call last): [ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] yield resources [ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self.driver.spawn(context, instance, image_meta, [ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] vm_ref = self.build_virtual_machine(instance, [ 638.049966] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] vif_infos = vmwarevif.get_vif_info(self._session, [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] for vif in network_info: [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] return self._sync_wrapper(fn, *args, **kwargs) [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self.wait() [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self[:] = self._gt.wait() [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] return self._exit_event.wait() [ 638.050312] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 638.050312] env[61273]: ERROR 
nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] current.throw(*self._exc) [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] result = function(*args, **kwargs) [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] return func(*args, **kwargs) [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] raise e [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] nwinfo = self.network_api.allocate_for_instance( [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] created_port_ids = self._update_ports_for_instance( [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] with excutils.save_and_reraise_exception(): [ 638.050676] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self.force_reraise() [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] raise self.value [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] updated_port = self._update_port( [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] _ensure_no_port_binding_failure(port) [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] raise exception.PortBindingFailed(port_id=port['id']) [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] nova.exception.PortBindingFailed: Binding failed for port 90378eb2-b59a-48b8-a577-33399d56d5c9, please check neutron logs for more information. [ 638.051048] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] [ 638.051048] env[61273]: INFO nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Terminating instance [ 638.052292] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Acquiring lock "refresh_cache-782dc85a-56f4-4f03-8711-b78bbadb33ce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.052459] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Acquired lock "refresh_cache-782dc85a-56f4-4f03-8711-b78bbadb33ce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.052622] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 638.053041] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 8dbb44aa61c64732b38c4edad18dd6de in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 638.059417] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dbb44aa61c64732b38c4edad18dd6de [ 638.252719] env[61273]: DEBUG nova.network.neutron [-] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.253212] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0f4a97be8ba14b5cb137f9edf96e732a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 638.261800] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f4a97be8ba14b5cb137f9edf96e732a [ 638.396493] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.396590] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 
tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 638.398270] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 527baec688df4468932522712de4d34b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 638.399276] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.379s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.400918] env[61273]: INFO nova.compute.claims [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.402627] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg a67dcec691a6423b8939d4874711864c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 638.434459] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 527baec688df4468932522712de4d34b [ 638.440100] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a67dcec691a6423b8939d4874711864c [ 638.570261] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.654481] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.654906] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg a24daf9bfd8e4225803992249ddd614a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 638.663753] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a24daf9bfd8e4225803992249ddd614a [ 638.756043] env[61273]: INFO nova.compute.manager [-] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Took 1.02 seconds to deallocate network for instance. 
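The "waited 16.379s" / "held 2.441s" lines around the claims come from serializing every resource claim (and claim abort) behind the single "compute_resources" lock; lockutils emits those timings itself. A minimal sketch of that serialization with oslo.concurrency, assuming a much-simplified tracker whose free-resource dict is hypothetical:

# Minimal sketch of claim serialization; not the real nova ResourceTracker.
from oslo_concurrency import lockutils


class ResourceTracker:
    def __init__(self, vcpus_total, memory_mb_total):
        self.free = {"VCPU": vcpus_total, "MEMORY_MB": memory_mb_total}

    @lockutils.synchronized("compute_resources")
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Only one claim or abort can touch the free-resource view at a time,
        # hence the "waited N s" / "held N s" lines emitted by lockutils.
        if self.free["VCPU"] < vcpus or self.free["MEMORY_MB"] < memory_mb:
            raise RuntimeError("insufficient resources for %s" % instance_uuid)
        self.free["VCPU"] -= vcpus
        self.free["MEMORY_MB"] -= memory_mb

    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Mirrors the "Aborting claim" path taken after a failed spawn.
        self.free["VCPU"] += vcpus
        self.free["MEMORY_MB"] += memory_mb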
[ 638.758345] env[61273]: DEBUG nova.compute.claims [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 638.758532] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.906233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 651498c001514bd19233bcc185a2a7b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 638.907932] env[61273]: DEBUG nova.compute.utils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 638.908837] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg d93534a53aa2431286f5691da9f252c1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 638.909488] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 638.909646] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 638.916362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 651498c001514bd19233bcc185a2a7b4 [ 638.919132] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d93534a53aa2431286f5691da9f252c1 [ 638.966382] env[61273]: DEBUG nova.policy [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '906e8368f31e4815892c2b75e91f9d27', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e691198c7b14436bfc38c8b97b9519d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 639.157780] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Releasing lock "refresh_cache-782dc85a-56f4-4f03-8711-b78bbadb33ce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.158160] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 639.158416] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 639.158740] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-205ce5e1-7480-4bdd-9f31-9f3ebc07a09e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.168537] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd7c8ef-949c-4474-902f-7bcf498af8f8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.191886] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 782dc85a-56f4-4f03-8711-b78bbadb33ce could not be found. [ 639.192182] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 639.192375] env[61273]: INFO nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Took 0.03 seconds to destroy the instance on the hypervisor. [ 639.192660] env[61273]: DEBUG oslo.service.loopingcall [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 639.192882] env[61273]: DEBUG nova.compute.manager [-] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 639.192981] env[61273]: DEBUG nova.network.neutron [-] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 639.207874] env[61273]: DEBUG nova.network.neutron [-] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.208310] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e06eda5d9d08466893643a8f4ad18439 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 639.217383] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e06eda5d9d08466893643a8f4ad18439 [ 639.414151] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 639.415805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg d1b661a411eb4eed8680116fe1618a74 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 639.457759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1b661a411eb4eed8680116fe1618a74 [ 639.563493] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Successfully created port: 65fe8495-4b99-4083-8128-022b6c4de52b {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.710068] env[61273]: DEBUG nova.network.neutron [-] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.710563] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b466603db7ca4e508f2b46be4f49d214 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 639.724536] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b466603db7ca4e508f2b46be4f49d214 [ 639.790930] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaae3270-fbc6-4ff4-a209-462124b86c09 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.798676] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54e754b-de7f-4e21-bc4c-0c592d7a114f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.843149] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13786cce-8a06-42a0-a6ae-5ff05ac57d90 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.852513] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd258dc6-5ef5-4885-ae8e-5dd9ec24d9f3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.867176] env[61273]: DEBUG nova.compute.provider_tree [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Inventory has not changed in ProviderTree for provider: 
4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.867826] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 8f22b2a5849e4ffe9982b112b71cb734 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 639.877359] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f22b2a5849e4ffe9982b112b71cb734 [ 639.900546] env[61273]: DEBUG nova.compute.manager [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Received event network-changed-90378eb2-b59a-48b8-a577-33399d56d5c9 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 639.900755] env[61273]: DEBUG nova.compute.manager [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Refreshing instance network info cache due to event network-changed-90378eb2-b59a-48b8-a577-33399d56d5c9. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 639.901195] env[61273]: DEBUG oslo_concurrency.lockutils [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] Acquiring lock "refresh_cache-782dc85a-56f4-4f03-8711-b78bbadb33ce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.901395] env[61273]: DEBUG oslo_concurrency.lockutils [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] Acquired lock "refresh_cache-782dc85a-56f4-4f03-8711-b78bbadb33ce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.901636] env[61273]: DEBUG nova.network.neutron [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Refreshing network info cache for port 90378eb2-b59a-48b8-a577-33399d56d5c9 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 639.902182] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] Expecting reply to msg 58a1035b78bb4bd087f1d6f58f30481f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 639.910516] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58a1035b78bb4bd087f1d6f58f30481f [ 639.925785] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 3298d126b70041efb23572c627d51a08 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 639.961934] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3298d126b70041efb23572c627d51a08 [ 640.220547] env[61273]: INFO nova.compute.manager [-] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Took 1.03 seconds to deallocate network for instance. 
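The report-client lines here repeatedly conclude that inventory "has not changed" for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb and skip pushing anything to placement. A toy sketch of that compare-before-update idea; the dictionary shape follows the log, but the provider-tree cache and placement callable are stand-ins, not the real nova.scheduler.client.report API (which also tracks resource provider generations):

# Toy compare-before-update sketch; the real report client also handles generations.
def set_inventory_for_provider(provider_tree, placement_put, provider_uuid, inventory):
    """Only push inventory to placement when it differs from the cached view."""
    cached = provider_tree.get(provider_uuid, {})
    if cached == inventory:
        print("Inventory has not changed for provider %s" % provider_uuid)
        return False
    provider_tree[provider_uuid] = inventory
    placement_put(provider_uuid, inventory)
    return True


# Example with the resource classes shown in the log:
tree = {}
inv = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
uuid = "4b2a9d85-76d2-47a9-873e-680d9c1d5ccb"
set_inventory_for_provider(tree, lambda u, i: None, uuid, inv)  # pushes once
set_inventory_for_provider(tree, lambda u, i: None, uuid, inv)  # "has not changed"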
[ 640.224067] env[61273]: DEBUG nova.compute.claims [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 640.224331] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.370727] env[61273]: DEBUG nova.scheduler.client.report [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 640.373327] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 40326fa4caec4f1596a7807433e2df04 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 640.385899] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40326fa4caec4f1596a7807433e2df04 [ 640.420772] env[61273]: DEBUG nova.network.neutron [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 640.428217] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 640.453238] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 640.453482] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 640.453640] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.453825] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 640.453973] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.454121] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 640.454324] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 640.454481] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 640.454643] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 640.454808] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 640.454975] env[61273]: DEBUG nova.virt.hardware [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 640.456150] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23609b50-b17d-440c-b59c-826193096602 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.465444] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ae4a08-e585-4bfc-86f3-183105a3cf43 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.515879] env[61273]: DEBUG nova.network.neutron [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.516665] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] Expecting reply to msg 419eac53c63b4f198142a319b43dd3e4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 640.526437] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 419eac53c63b4f198142a319b43dd3e4 [ 640.714989] env[61273]: ERROR nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 65fe8495-4b99-4083-8128-022b6c4de52b, please check neutron logs for more information. 
[ 640.714989] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 640.714989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 640.714989] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 640.714989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 640.714989] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 640.714989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 640.714989] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 640.714989] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 640.714989] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 640.714989] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 640.714989] env[61273]: ERROR nova.compute.manager raise self.value [ 640.714989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 640.714989] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 640.714989] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 640.714989] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 640.715627] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 640.715627] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 640.715627] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 65fe8495-4b99-4083-8128-022b6c4de52b, please check neutron logs for more information. 
[ 640.715627] env[61273]: ERROR nova.compute.manager [ 640.715627] env[61273]: Traceback (most recent call last): [ 640.715627] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 640.715627] env[61273]: listener.cb(fileno) [ 640.715627] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 640.715627] env[61273]: result = function(*args, **kwargs) [ 640.715627] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 640.715627] env[61273]: return func(*args, **kwargs) [ 640.715627] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 640.715627] env[61273]: raise e [ 640.715627] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 640.715627] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 640.715627] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 640.715627] env[61273]: created_port_ids = self._update_ports_for_instance( [ 640.715627] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 640.715627] env[61273]: with excutils.save_and_reraise_exception(): [ 640.715627] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 640.715627] env[61273]: self.force_reraise() [ 640.715627] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 640.715627] env[61273]: raise self.value [ 640.715627] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 640.715627] env[61273]: updated_port = self._update_port( [ 640.715627] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 640.715627] env[61273]: _ensure_no_port_binding_failure(port) [ 640.715627] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 640.715627] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 640.716425] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 65fe8495-4b99-4083-8128-022b6c4de52b, please check neutron logs for more information. [ 640.716425] env[61273]: Removing descriptor: 19 [ 640.716425] env[61273]: ERROR nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 65fe8495-4b99-4083-8128-022b6c4de52b, please check neutron logs for more information. 
[ 640.716425] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Traceback (most recent call last): [ 640.716425] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 640.716425] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] yield resources [ 640.716425] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 640.716425] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self.driver.spawn(context, instance, image_meta, [ 640.716425] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 640.716425] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 640.716425] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 640.716425] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] vm_ref = self.build_virtual_machine(instance, [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] vif_infos = vmwarevif.get_vif_info(self._session, [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] for vif in network_info: [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] return self._sync_wrapper(fn, *args, **kwargs) [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self.wait() [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self[:] = self._gt.wait() [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] return self._exit_event.wait() [ 640.716881] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 640.717228] env[61273]: ERROR 
nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] result = hub.switch() [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] return self.greenlet.switch() [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] result = function(*args, **kwargs) [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] return func(*args, **kwargs) [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] raise e [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] nwinfo = self.network_api.allocate_for_instance( [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 640.717228] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] created_port_ids = self._update_ports_for_instance( [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] with excutils.save_and_reraise_exception(): [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self.force_reraise() [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] raise self.value [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] updated_port = self._update_port( [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 640.717581] 
env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] _ensure_no_port_binding_failure(port) [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 640.717581] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] raise exception.PortBindingFailed(port_id=port['id']) [ 640.717919] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] nova.exception.PortBindingFailed: Binding failed for port 65fe8495-4b99-4083-8128-022b6c4de52b, please check neutron logs for more information. [ 640.717919] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] [ 640.717919] env[61273]: INFO nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Terminating instance [ 640.718804] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Acquiring lock "refresh_cache-1336becb-9691-490c-86ea-3bc70d13d7df" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.719042] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Acquired lock "refresh_cache-1336becb-9691-490c-86ea-3bc70d13d7df" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.719270] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 640.719770] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg bb42129a050f4c758370bba415a21d0e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 640.727794] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb42129a050f4c758370bba415a21d0e [ 640.876156] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.876695] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 640.878547] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 4fbd594204ea4346a0460ba6075ba911 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 640.879644] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.637s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.880221] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.880221] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61273) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 640.880221] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.926s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.882103] env[61273]: INFO nova.compute.claims [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 640.883667] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 109096abbed64327910558240c0472b9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 640.886628] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe0d59d-7512-475c-961c-d90e8a700db4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.894575] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71729d66-fea2-4bf4-b891-5cb73476e17f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.911897] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21da7b35-6825-4bed-a099-64c19f425fdc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.922664] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7edd03-e7e1-4052-8c09-d1fafb484184 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.926246] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 109096abbed64327910558240c0472b9 [ 640.926914] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fbd594204ea4346a0460ba6075ba911 [ 640.964222] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181522MB free_disk=141GB free_vcpus=48 pci_devices=None {{(pid=61273) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 640.964396] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.020073] env[61273]: DEBUG oslo_concurrency.lockutils [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] Releasing lock "refresh_cache-782dc85a-56f4-4f03-8711-b78bbadb33ce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.020073] env[61273]: DEBUG nova.compute.manager [req-891709a5-8ef8-4817-aa86-b59fc3c4a3e2 req-f751b190-627f-4d66-a992-e2d2989e3189 service nova] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Received event network-vif-deleted-90378eb2-b59a-48b8-a577-33399d56d5c9 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 641.239735] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.316695] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.316695] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 267e08f8ec334bcfb3d4fa0af4be423b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 641.333409] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 267e08f8ec334bcfb3d4fa0af4be423b [ 641.389774] env[61273]: DEBUG nova.compute.utils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 641.389774] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 135145ac16af4438821a7214a75c88d9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 641.389936] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 641.390214] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 641.394149] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 01dcf76ca1184fd4b58d7740b6ac6310 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 641.402124] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01dcf76ca1184fd4b58d7740b6ac6310 [ 641.404569] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 135145ac16af4438821a7214a75c88d9 [ 641.432688] env[61273]: DEBUG nova.policy [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8742f4ce8dd94720a20be2ae3f9e4e1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c95c48da3e454ce4b42d95ae381f16ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 641.776364] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Successfully created port: 17c48dca-cfb0-4cdc-9269-490a11efa464 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 641.818984] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Releasing lock "refresh_cache-1336becb-9691-490c-86ea-3bc70d13d7df" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.819382] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 641.819750] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 641.820106] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ca78998-850a-4860-8563-f70401ac2059 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.834214] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee522ba4-adf5-436c-8005-93510b6ed3f6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.858899] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1336becb-9691-490c-86ea-3bc70d13d7df could not be found. [ 641.858899] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 641.858899] env[61273]: INFO nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Took 0.04 seconds to destroy the instance on the hypervisor. [ 641.858899] env[61273]: DEBUG oslo.service.loopingcall [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 641.858899] env[61273]: DEBUG nova.compute.manager [-] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 641.858899] env[61273]: DEBUG nova.network.neutron [-] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 641.873677] env[61273]: DEBUG nova.network.neutron [-] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.874291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 54efb7bee0a94ea59bddb99de49315ed in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 641.885253] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54efb7bee0a94ea59bddb99de49315ed [ 641.896626] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 641.898624] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg c731fd4975ab4f5a8ed79e423f30cc8c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 641.934905] env[61273]: DEBUG nova.compute.manager [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Received event network-changed-65fe8495-4b99-4083-8128-022b6c4de52b {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 641.935134] env[61273]: DEBUG nova.compute.manager [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Refreshing instance network info cache due to event network-changed-65fe8495-4b99-4083-8128-022b6c4de52b. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 641.935555] env[61273]: DEBUG oslo_concurrency.lockutils [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] Acquiring lock "refresh_cache-1336becb-9691-490c-86ea-3bc70d13d7df" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.935555] env[61273]: DEBUG oslo_concurrency.lockutils [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] Acquired lock "refresh_cache-1336becb-9691-490c-86ea-3bc70d13d7df" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.935746] env[61273]: DEBUG nova.network.neutron [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Refreshing network info cache for port 65fe8495-4b99-4083-8128-022b6c4de52b {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 641.936253] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] Expecting reply to msg c99d1d7a578241cabfaa0396e485e644 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 641.949406] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c99d1d7a578241cabfaa0396e485e644 [ 641.955359] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c731fd4975ab4f5a8ed79e423f30cc8c [ 642.278394] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12265984-8a61-4d3e-aae9-f466229ef8b2 {{(pid=61273) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.286377] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e35320-c261-4298-ab8f-b7b984751816 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.323740] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bd255e-838f-425b-905f-b171de903ace {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.331455] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685643c7-0107-49ba-8edb-ca2fe47126da {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.350583] env[61273]: DEBUG nova.compute.provider_tree [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.350583] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 241a97008ca14d96adb01d879a3677aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 642.353446] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 241a97008ca14d96adb01d879a3677aa [ 642.376389] env[61273]: DEBUG nova.network.neutron [-] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.376973] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1f6a0d1ce42544769d2970048805745d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 642.385491] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f6a0d1ce42544769d2970048805745d [ 642.407383] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg a8ff48e8761643b8a5a55c9c75d6b630 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 642.450414] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8ff48e8761643b8a5a55c9c75d6b630 [ 642.454628] env[61273]: DEBUG nova.network.neutron [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.515795] env[61273]: DEBUG nova.network.neutron [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.516350] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] Expecting reply to msg a2708e1dd5c542ea9e6da469edbe9eff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 642.524951] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2708e1dd5c542ea9e6da469edbe9eff [ 642.850006] env[61273]: DEBUG nova.scheduler.client.report [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 642.852735] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 78164f5e8709452fa264b5825e776ada in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 642.867523] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78164f5e8709452fa264b5825e776ada [ 642.879200] env[61273]: INFO nova.compute.manager [-] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Took 1.02 seconds to deallocate network for instance. [ 642.881371] env[61273]: DEBUG nova.compute.claims [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 642.881541] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.911102] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 642.938831] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 642.938831] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 642.938831] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.939043] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 642.939043] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.939043] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 642.939043] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 642.939183] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 642.939273] env[61273]: DEBUG nova.virt.hardware [None 
req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 642.939424] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 642.939609] env[61273]: DEBUG nova.virt.hardware [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 642.940534] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5066577-a2c1-442e-87a9-7dc4a4613575 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.950020] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716ca084-eb0a-4a63-b816-8a697a7da299 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.963306] env[61273]: ERROR nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 17c48dca-cfb0-4cdc-9269-490a11efa464, please check neutron logs for more information. 
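[editor's note] The nova.virt.hardware DEBUG lines above walk from the flavor/image limits (0:0:0, maxima 65536) to "Got 1 possible topologies" for the single-vCPU m1.nano flavor. The sketch below only mimics the idea of that enumeration step, under the assumption that a candidate topology is any sockets*cores*threads factorisation of the vCPU count within the limits; the function name and defaults are illustrative, not Nova's _get_possible_cpu_topologies().

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate every split of the vCPU count that respects the limits.
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # One vCPU admits exactly one split, matching "Got 1 possible topologies":
    print(possible_topologies(1))
    # Larger flavors admit several, which is why the candidates are then sorted
    # against the preferred topology before one is chosen.
    print(possible_topologies(4))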
[ 642.963306] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 642.963306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.963306] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 642.963306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.963306] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 642.963306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.963306] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 642.963306] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.963306] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 642.963306] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.963306] env[61273]: ERROR nova.compute.manager raise self.value [ 642.963306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.963306] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 642.963306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.963306] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 642.963749] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.963749] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 642.963749] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 17c48dca-cfb0-4cdc-9269-490a11efa464, please check neutron logs for more information. 
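[editor's note] Several frames above sit inside oslo_utils.excutils.save_and_reraise_exception() (__exit__ -> force_reraise -> raise self.value): the port-update loop performs its cleanup and then re-raises the original PortBindingFailed unchanged, which is why the same exception surfaces again in _build_and_run_instance. A hedged usage sketch of that context manager follows; the update/cleanup functions are hypothetical stand-ins rather than Nova's own code.

    from oslo_utils import excutils

    def update_ports(ports, delete_port):
        created = []
        for port in ports:
            try:
                created.append(port["id"])
                if port.get("fail"):
                    raise RuntimeError("binding failed for %s" % port["id"])
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Runs before __exit__ re-raises the original exception,
                    # so partially created ports can be rolled back without
                    # masking the real error from the caller.
                    for port_id in created:
                        delete_port(port_id)

    try:
        update_ports([{"id": "p1"}, {"id": "p2", "fail": True}],
                     delete_port=lambda port_id: print("rolled back", port_id))
    except RuntimeError as exc:
        print("re-raised:", exc)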
[ 642.963749] env[61273]: ERROR nova.compute.manager [ 642.963749] env[61273]: Traceback (most recent call last): [ 642.963749] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 642.963749] env[61273]: listener.cb(fileno) [ 642.963749] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.963749] env[61273]: result = function(*args, **kwargs) [ 642.963749] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 642.963749] env[61273]: return func(*args, **kwargs) [ 642.963749] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.963749] env[61273]: raise e [ 642.963749] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.963749] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 642.963749] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.963749] env[61273]: created_port_ids = self._update_ports_for_instance( [ 642.963749] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.963749] env[61273]: with excutils.save_and_reraise_exception(): [ 642.963749] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.963749] env[61273]: self.force_reraise() [ 642.963749] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.963749] env[61273]: raise self.value [ 642.963749] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.963749] env[61273]: updated_port = self._update_port( [ 642.963749] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.963749] env[61273]: _ensure_no_port_binding_failure(port) [ 642.963749] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.963749] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 642.964497] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 17c48dca-cfb0-4cdc-9269-490a11efa464, please check neutron logs for more information. [ 642.964497] env[61273]: Removing descriptor: 19 [ 642.964497] env[61273]: ERROR nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 17c48dca-cfb0-4cdc-9269-490a11efa464, please check neutron logs for more information. 
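[editor's note] The per-instance tracebacks in this log (nova/network/model.py _sync_wrapper -> self._gt.wait()) show why the PortBindingFailed only surfaces inside driver.spawn(): network allocation runs in an eventlet greenthread and is waited on only when the VIF list is first iterated. Below is a stripped-down sketch of that "allocate in the background, block on first use" pattern, assuming eventlet is available; AsyncResult and allocate_network are illustrative names, not Nova's classes.

    import eventlet

    class AsyncResult:
        def __init__(self, func, *args, **kwargs):
            self._gt = eventlet.spawn(func, *args, **kwargs)
            self._cached = None
            self._done = False

        def wait(self):
            if not self._done:
                # GreenThread.wait() returns the worker's result or re-raises
                # its exception, which is how an allocation failure reappears
                # later, at the point the caller finally needs the result.
                self._cached = self._gt.wait()
                self._done = True
            return self._cached

    def allocate_network(instance_id):
        # Stand-in for network_api.allocate_for_instance().
        return ["vif-for-%s" % instance_id]

    network_info = AsyncResult(allocate_network, "30ed4438")
    # ... image fetch, block device setup, etc. proceed concurrently ...
    print(network_info.wait())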
[ 642.964497] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Traceback (most recent call last): [ 642.964497] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 642.964497] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] yield resources [ 642.964497] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 642.964497] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self.driver.spawn(context, instance, image_meta, [ 642.964497] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 642.964497] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.964497] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.964497] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] vm_ref = self.build_virtual_machine(instance, [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] for vif in network_info: [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] return self._sync_wrapper(fn, *args, **kwargs) [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self.wait() [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self[:] = self._gt.wait() [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] return self._exit_event.wait() [ 642.964934] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 642.965324] env[61273]: ERROR 
nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] result = hub.switch() [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] return self.greenlet.switch() [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] result = function(*args, **kwargs) [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] return func(*args, **kwargs) [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] raise e [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] nwinfo = self.network_api.allocate_for_instance( [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.965324] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] created_port_ids = self._update_ports_for_instance( [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] with excutils.save_and_reraise_exception(): [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self.force_reraise() [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] raise self.value [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] updated_port = self._update_port( [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.965716] 
env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] _ensure_no_port_binding_failure(port) [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.965716] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] raise exception.PortBindingFailed(port_id=port['id']) [ 642.966077] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] nova.exception.PortBindingFailed: Binding failed for port 17c48dca-cfb0-4cdc-9269-490a11efa464, please check neutron logs for more information. [ 642.966077] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] [ 642.966077] env[61273]: INFO nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Terminating instance [ 642.971201] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Acquiring lock "refresh_cache-30ed4438-4f74-4bc3-a6cc-a59420751940" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.971201] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Acquired lock "refresh_cache-30ed4438-4f74-4bc3-a6cc-a59420751940" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.971201] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 642.971201] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg ada3de03a66045cbaa499409ec420794 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 642.977570] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ada3de03a66045cbaa499409ec420794 [ 643.019187] env[61273]: DEBUG oslo_concurrency.lockutils [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] Releasing lock "refresh_cache-1336becb-9691-490c-86ea-3bc70d13d7df" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.019455] env[61273]: DEBUG nova.compute.manager [req-cd94f1fb-e972-4707-b053-017e62ca51e3 req-a49ae2bf-a3c4-4686-a610-bed0e95421f4 service nova] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Received event network-vif-deleted-65fe8495-4b99-4083-8128-022b6c4de52b {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 643.356352] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 
tempest-ServerActionsTestOtherB-1209735886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.475s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.356352] env[61273]: DEBUG nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 643.357761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg d723dbf8779947648dee5591f55e88d0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 643.358884] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.391s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.360629] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg efaa44b804f14c26bb0c6186100613a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 643.401103] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d723dbf8779947648dee5591f55e88d0 [ 643.401720] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efaa44b804f14c26bb0c6186100613a4 [ 643.487972] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.593508] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.594105] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 83dce72c4d1b46efa235d2da54241189 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 643.601942] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83dce72c4d1b46efa235d2da54241189 [ 643.863919] env[61273]: DEBUG nova.compute.utils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 643.865045] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 64977262f3da422ba80fa8fce79d42e8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 643.866832] env[61273]: DEBUG nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 643.867147] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 643.875861] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64977262f3da422ba80fa8fce79d42e8 [ 643.918951] env[61273]: DEBUG nova.policy [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e298aba9bd1946fabb634ea5c111c634', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc437f0153524c71b3212518dc00bd2e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 643.964695] env[61273]: DEBUG nova.compute.manager [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Received event network-changed-17c48dca-cfb0-4cdc-9269-490a11efa464 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 643.964894] env[61273]: DEBUG nova.compute.manager [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Refreshing instance network info cache due to event network-changed-17c48dca-cfb0-4cdc-9269-490a11efa464. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 643.965082] env[61273]: DEBUG oslo_concurrency.lockutils [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] Acquiring lock "refresh_cache-30ed4438-4f74-4bc3-a6cc-a59420751940" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.096293] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Releasing lock "refresh_cache-30ed4438-4f74-4bc3-a6cc-a59420751940" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.096765] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 644.096991] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 644.097324] env[61273]: DEBUG oslo_concurrency.lockutils [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] Acquired lock "refresh_cache-30ed4438-4f74-4bc3-a6cc-a59420751940" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.097547] env[61273]: DEBUG nova.network.neutron [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Refreshing network info cache for port 17c48dca-cfb0-4cdc-9269-490a11efa464 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 644.097976] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] Expecting reply to msg b2f9f434d760422f97347a268e1b1b35 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 644.105309] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb0611ff-1657-4fa6-8b5f-ededbc9608d3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.110458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2f9f434d760422f97347a268e1b1b35 [ 644.118471] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce31c3a6-5935-428b-b7db-25a868f580bb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.144502] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 30ed4438-4f74-4bc3-a6cc-a59420751940 could not be found. [ 644.144753] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 644.144927] env[61273]: INFO nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Took 0.05 seconds to destroy the instance on the hypervisor. [ 644.145160] env[61273]: DEBUG oslo.service.loopingcall [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 644.147477] env[61273]: DEBUG nova.compute.manager [-] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 644.147567] env[61273]: DEBUG nova.network.neutron [-] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 644.163597] env[61273]: DEBUG nova.network.neutron [-] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.164116] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 57a076d8bd974f10a02e2cc0b33996d5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 644.171354] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57a076d8bd974f10a02e2cc0b33996d5 [ 644.227985] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ffe296-7daa-4468-94ef-cd6300f27cff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.236041] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864ceff7-3850-4fa4-a124-66dae9305879 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.274029] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff684ae8-b514-4d06-855f-b1ae56466a7d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.281212] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c1bfa6-8f07-4c04-8d36-8b0d429eefd3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.293887] env[61273]: DEBUG nova.compute.provider_tree [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.294382] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg cb99c53b3d8747b29b90edb6c1a4cf4f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 644.302029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb99c53b3d8747b29b90edb6c1a4cf4f [ 644.308147] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Successfully created port: 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 644.370028] env[61273]: DEBUG nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 
tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 644.371885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 73f09b213d444b728e799536f207ff63 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 644.401479] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73f09b213d444b728e799536f207ff63 [ 644.620976] env[61273]: DEBUG nova.network.neutron [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.671242] env[61273]: DEBUG nova.network.neutron [-] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.671815] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c470f5eed4f74ba38c232a954712a444 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 644.680263] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c470f5eed4f74ba38c232a954712a444 [ 644.689564] env[61273]: DEBUG nova.network.neutron [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.690049] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] Expecting reply to msg 97e5cd8807d84305a4151e92bfb3cf4b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 644.698281] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97e5cd8807d84305a4151e92bfb3cf4b [ 644.796875] env[61273]: DEBUG nova.scheduler.client.report [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 644.799303] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 9ccd2edea73642c8bb92c038a085b2be in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 644.810767] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ccd2edea73642c8bb92c038a085b2be [ 644.885357] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg c3dd1115b5f24f64810044208383cab0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 644.915017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3dd1115b5f24f64810044208383cab0 [ 645.174597] env[61273]: INFO nova.compute.manager [-] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Took 1.03 seconds to deallocate network for instance. [ 645.177258] env[61273]: DEBUG nova.compute.claims [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 645.177441] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.194077] env[61273]: DEBUG oslo_concurrency.lockutils [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] Releasing lock "refresh_cache-30ed4438-4f74-4bc3-a6cc-a59420751940" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.194311] env[61273]: DEBUG nova.compute.manager [req-fe260b97-2cfb-447d-897c-485404437a14 req-7de324a7-405a-4663-8f64-237b9387590f service nova] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Received event network-vif-deleted-17c48dca-cfb0-4cdc-9269-490a11efa464 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 645.302535] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.944s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.303206] env[61273]: ERROR nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f50cebee-32bc-48a5-94cd-9978e48c02f5, please check neutron logs for more information. 
[ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Traceback (most recent call last): [ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self.driver.spawn(context, instance, image_meta, [ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] vm_ref = self.build_virtual_machine(instance, [ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.303206] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] for vif in network_info: [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] return self._sync_wrapper(fn, *args, **kwargs) [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self.wait() [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self[:] = self._gt.wait() [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] return self._exit_event.wait() [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] result = hub.switch() [ 645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
645.303561] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] return self.greenlet.switch() [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] result = function(*args, **kwargs) [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] return func(*args, **kwargs) [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] raise e [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] nwinfo = self.network_api.allocate_for_instance( [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] created_port_ids = self._update_ports_for_instance( [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] with excutils.save_and_reraise_exception(): [ 645.303967] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] self.force_reraise() [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] raise self.value [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] updated_port = self._update_port( [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] _ensure_no_port_binding_failure(port) [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] raise exception.PortBindingFailed(port_id=port['id']) [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] nova.exception.PortBindingFailed: Binding failed for port f50cebee-32bc-48a5-94cd-9978e48c02f5, please check neutron logs for more information. [ 645.304358] env[61273]: ERROR nova.compute.manager [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] [ 645.304715] env[61273]: DEBUG nova.compute.utils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Binding failed for port f50cebee-32bc-48a5-94cd-9978e48c02f5, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 645.305117] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.701s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.306505] env[61273]: INFO nova.compute.claims [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.308131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg d96d62d5e7f4459bad566f737633e06d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 645.309718] env[61273]: DEBUG nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Build of instance 017b1da4-7c9b-477d-92a3-29b2248317d3 was re-scheduled: Binding failed for port f50cebee-32bc-48a5-94cd-9978e48c02f5, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 645.310246] env[61273]: DEBUG nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 645.310473] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-017b1da4-7c9b-477d-92a3-29b2248317d3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.310619] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-017b1da4-7c9b-477d-92a3-29b2248317d3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.310778] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 645.311145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 6f0bb099fe97425fadffeb5aa25a9aec in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 645.321443] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f0bb099fe97425fadffeb5aa25a9aec [ 645.343887] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d96d62d5e7f4459bad566f737633e06d [ 645.394093] env[61273]: DEBUG nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 645.423523] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 645.423523] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 645.423523] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 645.426475] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 645.426475] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 645.426475] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 645.426475] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 645.426668] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 645.430253] env[61273]: DEBUG 
nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 645.430253] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 645.430253] env[61273]: DEBUG nova.virt.hardware [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 645.430253] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24121404-ef3a-46c0-9731-4b4065827c16 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.437847] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa71f46-c0ee-42cf-ab37-486a46089cae {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.500043] env[61273]: ERROR nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c, please check neutron logs for more information. 
[ 645.500043] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 645.500043] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.500043] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 645.500043] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.500043] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 645.500043] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.500043] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 645.500043] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.500043] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 645.500043] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.500043] env[61273]: ERROR nova.compute.manager raise self.value [ 645.500043] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.500043] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 645.500043] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.500043] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 645.500576] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.500576] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 645.500576] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c, please check neutron logs for more information. 
[ 645.500576] env[61273]: ERROR nova.compute.manager [ 645.500576] env[61273]: Traceback (most recent call last): [ 645.500576] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 645.500576] env[61273]: listener.cb(fileno) [ 645.500576] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.500576] env[61273]: result = function(*args, **kwargs) [ 645.500576] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.500576] env[61273]: return func(*args, **kwargs) [ 645.500576] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.500576] env[61273]: raise e [ 645.500576] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.500576] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 645.500576] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.500576] env[61273]: created_port_ids = self._update_ports_for_instance( [ 645.500576] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.500576] env[61273]: with excutils.save_and_reraise_exception(): [ 645.500576] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.500576] env[61273]: self.force_reraise() [ 645.500576] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.500576] env[61273]: raise self.value [ 645.500576] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.500576] env[61273]: updated_port = self._update_port( [ 645.500576] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.500576] env[61273]: _ensure_no_port_binding_failure(port) [ 645.500576] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.500576] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 645.501426] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c, please check neutron logs for more information. [ 645.501426] env[61273]: Removing descriptor: 19 [ 645.501426] env[61273]: ERROR nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c, please check neutron logs for more information. 
[ 645.501426] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Traceback (most recent call last): [ 645.501426] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 645.501426] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] yield resources [ 645.501426] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 645.501426] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self.driver.spawn(context, instance, image_meta, [ 645.501426] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 645.501426] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.501426] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.501426] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] vm_ref = self.build_virtual_machine(instance, [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] for vif in network_info: [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] return self._sync_wrapper(fn, *args, **kwargs) [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self.wait() [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self[:] = self._gt.wait() [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] return self._exit_event.wait() [ 645.501866] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.502260] env[61273]: ERROR 
nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] result = hub.switch() [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] return self.greenlet.switch() [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] result = function(*args, **kwargs) [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] return func(*args, **kwargs) [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] raise e [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] nwinfo = self.network_api.allocate_for_instance( [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.502260] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] created_port_ids = self._update_ports_for_instance( [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] with excutils.save_and_reraise_exception(): [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self.force_reraise() [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] raise self.value [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] updated_port = self._update_port( [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.502701] 
env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] _ensure_no_port_binding_failure(port) [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.502701] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] raise exception.PortBindingFailed(port_id=port['id']) [ 645.503069] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] nova.exception.PortBindingFailed: Binding failed for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c, please check neutron logs for more information. [ 645.503069] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] [ 645.503069] env[61273]: INFO nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Terminating instance [ 645.503069] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Acquiring lock "refresh_cache-767d7956-954b-4be7-8cc6-45872ff4cfce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.503069] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Acquired lock "refresh_cache-767d7956-954b-4be7-8cc6-45872ff4cfce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.503069] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 645.503256] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 27160f3c2fba4de29fb3495bca2e5236 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 645.509687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27160f3c2fba4de29fb3495bca2e5236 [ 645.811109] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 18d1b132054345c5ac25727d06437b24 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 645.819478] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18d1b132054345c5ac25727d06437b24 [ 645.831322] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.933219] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.933808] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 1ffbd1d8b8864629a3698273019db93b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 645.942730] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ffbd1d8b8864629a3698273019db93b [ 646.013898] env[61273]: DEBUG nova.compute.manager [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Received event network-changed-7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 646.014151] env[61273]: DEBUG nova.compute.manager [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Refreshing instance network info cache due to event network-changed-7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 646.014358] env[61273]: DEBUG oslo_concurrency.lockutils [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] Acquiring lock "refresh_cache-767d7956-954b-4be7-8cc6-45872ff4cfce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.020530] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 646.096734] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.097247] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg dc42a772a9d4451781f1e9b2e173e982 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 646.105435] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc42a772a9d4451781f1e9b2e173e982 [ 646.435947] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-017b1da4-7c9b-477d-92a3-29b2248317d3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.436244] env[61273]: DEBUG nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 646.436431] env[61273]: DEBUG nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 646.436597] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 646.459051] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 646.459621] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 91cc9ac0d47e44f291e2fb0609d794d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 646.468317] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91cc9ac0d47e44f291e2fb0609d794d1 [ 646.600228] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Releasing lock "refresh_cache-767d7956-954b-4be7-8cc6-45872ff4cfce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.600631] env[61273]: DEBUG nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 646.600827] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 646.601611] env[61273]: DEBUG oslo_concurrency.lockutils [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] Acquired lock "refresh_cache-767d7956-954b-4be7-8cc6-45872ff4cfce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.601753] env[61273]: DEBUG nova.network.neutron [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Refreshing network info cache for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 646.602085] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] Expecting reply to msg f7c2bf1c98bf456698657c8e56e17f46 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 646.602853] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d17ea17-9e6f-49ea-a3e8-08ce4f737f13 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.612108] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7c2bf1c98bf456698657c8e56e17f46 [ 646.616316] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c652698e-1763-4ea9-ac0c-ef13af38fc19 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.627709] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0b61d8-3221-4907-94f0-025082945afc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.636766] 
env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c8be1d-397d-4a84-a718-c96c8dd2bfd6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.643915] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 767d7956-954b-4be7-8cc6-45872ff4cfce could not be found. [ 646.644199] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 646.644440] env[61273]: INFO nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Took 0.04 seconds to destroy the instance on the hypervisor. [ 646.644726] env[61273]: DEBUG oslo.service.loopingcall [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 646.645283] env[61273]: DEBUG nova.compute.manager [-] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 646.645476] env[61273]: DEBUG nova.network.neutron [-] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 646.673632] env[61273]: DEBUG nova.network.neutron [-] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 646.674211] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ec6827f916944f4c9435acd8c8bc09ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 646.678632] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d27942-eb1b-431d-9b89-39670cd99cac {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.682664] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047248a0-dc2c-454c-b74d-16eba75829be {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.687074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec6827f916944f4c9435acd8c8bc09ce [ 646.698130] env[61273]: DEBUG nova.compute.provider_tree [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.698588] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 9349785aed2849d0813b4afc2ce2a746 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 646.705197] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9349785aed2849d0813b4afc2ce2a746 [ 646.973679] env[61273]: DEBUG nova.network.neutron [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.973679] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 935d8371727e4732a09896f7c584baa6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 646.973679] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 935d8371727e4732a09896f7c584baa6 [ 647.125683] env[61273]: DEBUG nova.network.neutron [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 647.180248] env[61273]: DEBUG nova.network.neutron [-] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.180248] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d542a2d8a1984c2ea55c956092feb65b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 647.188240] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d542a2d8a1984c2ea55c956092feb65b [ 647.201468] env[61273]: DEBUG nova.network.neutron [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.201468] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] Expecting reply to msg e35461bf31ea4847b3804d72bc18a600 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 647.204317] env[61273]: DEBUG nova.scheduler.client.report [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 647.208087] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 316e33d663ef4f86883a433e73d83443 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 647.210679] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e35461bf31ea4847b3804d72bc18a600 [ 647.223031] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 316e33d663ef4f86883a433e73d83443 [ 647.474187] env[61273]: INFO nova.compute.manager [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 017b1da4-7c9b-477d-92a3-29b2248317d3] Took 1.04 seconds to deallocate network for instance. [ 647.476262] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 498171af571f4db18d318785dd9719ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 647.511451] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 498171af571f4db18d318785dd9719ba [ 647.682146] env[61273]: INFO nova.compute.manager [-] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Took 1.04 seconds to deallocate network for instance. 
[ 647.684756] env[61273]: DEBUG nova.compute.claims [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 647.684941] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.702751] env[61273]: DEBUG oslo_concurrency.lockutils [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] Releasing lock "refresh_cache-767d7956-954b-4be7-8cc6-45872ff4cfce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.703027] env[61273]: DEBUG nova.compute.manager [req-d31a4a2a-af5e-4645-9ad0-e24d614b7c7f req-0293f43e-a395-46f2-a334-c1755676a9a3 service nova] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Received event network-vif-deleted-7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 647.710969] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.711449] env[61273]: DEBUG nova.compute.manager [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 647.713598] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 09c7e92c9b7a43acbc49e4da0506b47d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 647.714327] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.984s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.715971] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 57d15729d67b439abbf9c3afc21d5674 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 647.744951] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09c7e92c9b7a43acbc49e4da0506b47d [ 647.750832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57d15729d67b439abbf9c3afc21d5674 [ 647.981087] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 3b11f0634bb6433cb0a43959fa5b8ecb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 648.016768] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b11f0634bb6433cb0a43959fa5b8ecb [ 648.218871] env[61273]: DEBUG nova.compute.utils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 648.219589] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg f2f2ab9707e7487d92a149f486430a21 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 648.223506] env[61273]: DEBUG nova.compute.manager [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Not allocating networking since 'none' was specified. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 648.229585] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2f2ab9707e7487d92a149f486430a21 [ 648.498843] env[61273]: INFO nova.scheduler.client.report [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Deleted allocations for instance 017b1da4-7c9b-477d-92a3-29b2248317d3 [ 648.506688] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 9083f1a133e24315bfdcc6f2551dd3df in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 648.520797] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9083f1a133e24315bfdcc6f2551dd3df [ 648.568958] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1602d28-8621-4d25-af43-dee7201d3f06 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.577185] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e368c79c-d806-4a32-87f4-046bf8235e91 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.607470] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0802a66a-3926-4238-b593-91add8084925 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.614479] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0f9502-fb9c-4b10-a033-4ce4543732e0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.627275] env[61273]: DEBUG nova.compute.provider_tree [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.627773] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 26c807e51aaa40ac97906351e3437256 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 648.636200] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26c807e51aaa40ac97906351e3437256 [ 648.724650] env[61273]: DEBUG nova.compute.manager [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 648.726345] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg e1c93c8c502b42efbd54d0fee96bcdc7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 648.758476] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1c93c8c502b42efbd54d0fee96bcdc7 [ 649.008227] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cce9a24c-b562-446f-81d0-0af97fd3dc3f tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "017b1da4-7c9b-477d-92a3-29b2248317d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.822s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.008823] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg 649dbb13f6124280b5543d987ac2f072 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 649.017694] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 649dbb13f6124280b5543d987ac2f072 [ 649.131502] env[61273]: DEBUG nova.scheduler.client.report [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 649.134098] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg bc49e771c65d48e6952a0ef5600391b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 649.152677] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc49e771c65d48e6952a0ef5600391b2 [ 649.230944] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg d28cc641dde94deebd031035b45a1324 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 649.273312] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d28cc641dde94deebd031035b45a1324 [ 649.511481] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 649.513295] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg d792ef7b276748eb918351aaf8234d5c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 649.546114] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d792ef7b276748eb918351aaf8234d5c [ 649.636940] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.922s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.637641] env[61273]: ERROR nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a, please check neutron logs for more information. [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Traceback (most recent call last): [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self.driver.spawn(context, instance, image_meta, [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] vm_ref = self.build_virtual_machine(instance, [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] vif_infos = vmwarevif.get_vif_info(self._session, [ 649.637641] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] for vif in network_info: [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] return self._sync_wrapper(fn, *args, **kwargs) [ 649.637979] env[61273]: ERROR nova.compute.manager 
[instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self.wait() [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self[:] = self._gt.wait() [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] return self._exit_event.wait() [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] current.throw(*self._exc) [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.637979] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] result = function(*args, **kwargs) [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] return func(*args, **kwargs) [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] raise e [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] nwinfo = self.network_api.allocate_for_instance( [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] created_port_ids = self._update_ports_for_instance( [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] with excutils.save_and_reraise_exception(): [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] self.force_reraise() [ 649.638457] env[61273]: ERROR nova.compute.manager [instance: 
32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.638841] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] raise self.value [ 649.638841] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.638841] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] updated_port = self._update_port( [ 649.638841] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.638841] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] _ensure_no_port_binding_failure(port) [ 649.638841] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.638841] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] raise exception.PortBindingFailed(port_id=port['id']) [ 649.638841] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] nova.exception.PortBindingFailed: Binding failed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a, please check neutron logs for more information. [ 649.638841] env[61273]: ERROR nova.compute.manager [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] [ 649.638841] env[61273]: DEBUG nova.compute.utils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Binding failed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 649.639698] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.443s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.641280] env[61273]: INFO nova.compute.claims [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.642891] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 4866c4c39ab045618f818efd7f2d6087 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 649.644151] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Build of instance 32b57d1b-d35f-488e-be23-9119f2f56562 was re-scheduled: Binding failed for port 35e64d9a-ac71-4a0f-abb8-c800619dff5a, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 649.644590] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 649.644813] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Acquiring lock "refresh_cache-32b57d1b-d35f-488e-be23-9119f2f56562" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.644956] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Acquired lock "refresh_cache-32b57d1b-d35f-488e-be23-9119f2f56562" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.645117] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 649.645474] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg a63f6b186f514d7cbd00eba048a3ca12 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 649.653315] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a63f6b186f514d7cbd00eba048a3ca12 [ 649.673715] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4866c4c39ab045618f818efd7f2d6087 [ 649.734702] env[61273]: DEBUG nova.compute.manager [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 649.757657] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 649.757917] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 649.758399] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.758623] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 649.761019] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.761019] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 649.761019] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 649.761019] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 649.761019] env[61273]: DEBUG nova.virt.hardware [None 
req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 649.761202] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 649.761202] env[61273]: DEBUG nova.virt.hardware [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.761202] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8542569c-b141-4a01-8349-88a16feb9de0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.769046] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24620029-03f7-4e09-aff4-429f1c56d978 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.783714] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 649.790154] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Creating folder: Project (b8db06aa9af6414d814c84d2bb435052). Parent ref: group-v103328. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 649.790462] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c7822fd0-b98f-4299-aafa-ed73af13d5db {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.800766] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Created folder: Project (b8db06aa9af6414d814c84d2bb435052) in parent group-v103328. [ 649.800980] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Creating folder: Instances. Parent ref: group-v103345. 
{{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 649.801212] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9552e5a0-d9d6-4581-9ef0-f5e7ded39a19 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.809605] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Created folder: Instances in parent group-v103345. [ 649.809840] env[61273]: DEBUG oslo.service.loopingcall [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 649.810026] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 649.810226] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb68db07-0517-41c8-9d82-e4de5adad3b4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.827207] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 649.827207] env[61273]: value = "task-375285" [ 649.827207] env[61273]: _type = "Task" [ 649.827207] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.834491] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375285, 'name': CreateVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.032107] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.148912] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg d009e20e45924021829ef932b82297fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 650.158797] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d009e20e45924021829ef932b82297fc [ 650.185910] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.296082] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.296639] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 68575297b25b4706bcc553d6c66cd2e7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 650.304630] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68575297b25b4706bcc553d6c66cd2e7 [ 650.338207] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375285, 'name': CreateVM_Task, 'duration_secs': 0.264365} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.338207] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 650.338207] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.338377] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.338678] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 650.338914] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-582da2f4-ce53-4c45-b6f1-7ea5beb7973b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.343302] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 650.343302] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]523b32c7-d72d-2555-0f4a-6db828c439fe" [ 650.343302] env[61273]: _type = "Task" [ 650.343302] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.350365] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]523b32c7-d72d-2555-0f4a-6db828c439fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.745418] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "c376b161-74f9-405a-bb86-516583a9a76f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.745651] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "c376b161-74f9-405a-bb86-516583a9a76f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.867112] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Releasing lock "refresh_cache-32b57d1b-d35f-488e-be23-9119f2f56562" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.867112] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 650.867112] env[61273]: DEBUG nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 650.867112] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.867112] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.867874] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg edf3fea0ddf340d3af7c9b8d0d9e8d01 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 650.867874] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edf3fea0ddf340d3af7c9b8d0d9e8d01 [ 650.867874] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]523b32c7-d72d-2555-0f4a-6db828c439fe, 'name': SearchDatastore_Task, 'duration_secs': 0.010444} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.867874] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.867874] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 650.868195] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.868195] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.868195] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 650.868195] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8adf33c-ec7d-4f8a-8052-f1112e78b008 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.868195] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 650.868839] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 650.868839] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c98a4bba-f6a2-4503-a501-764829fca364 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.873210] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 650.873210] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52d972eb-8355-75e6-a526-112948e6c80a" [ 650.873210] env[61273]: _type = "Task" [ 650.873210] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.883738] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52d972eb-8355-75e6-a526-112948e6c80a, 'name': SearchDatastore_Task} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.884549] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-828064fb-6138-4743-bbd7-cbfb81d1964b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.889320] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 650.889320] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]5227559d-f34e-392b-95fb-5e0950c565b3" [ 650.889320] env[61273]: _type = "Task" [ 650.889320] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.903464] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5227559d-f34e-392b-95fb-5e0950c565b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.059111] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bb5789-03c6-4e87-9f59-9be9c2b42f56 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.068220] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42035c0b-5466-41ba-9fc1-6377f6117442 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.097692] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96df6ebb-46f9-45d7-ba88-0ae758c3c619 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.106085] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1853b53-fb02-4bfc-b4f3-6ca245182564 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.118531] env[61273]: DEBUG nova.compute.provider_tree [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.119029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 2c81e85af5b94af7a50f728aefd3ec60 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 651.126163] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c81e85af5b94af7a50f728aefd3ec60 [ 651.334000] env[61273]: DEBUG nova.network.neutron [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.334525] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 99cac3028d3842fe8c05711a43113812 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 651.345929] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99cac3028d3842fe8c05711a43113812 [ 651.400669] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5227559d-f34e-392b-95fb-5e0950c565b3, 'name': SearchDatastore_Task, 'duration_secs': 0.007983} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.400930] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.401189] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] 109fc11e-d640-4617-99a3-0defe0a5aa6c/109fc11e-d640-4617-99a3-0defe0a5aa6c.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 651.401433] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a458af7-3f0b-4f97-b9a0-ae2be1cd49c0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.408467] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 651.408467] env[61273]: value = "task-375286" [ 651.408467] env[61273]: _type = "Task" [ 651.408467] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.418393] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375286, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.621807] env[61273]: DEBUG nova.scheduler.client.report [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.624514] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 304c0465493a4fcca474b4b624730d36 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 651.639073] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 304c0465493a4fcca474b4b624730d36 [ 651.840491] env[61273]: INFO nova.compute.manager [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] [instance: 32b57d1b-d35f-488e-be23-9119f2f56562] Took 1.04 seconds to deallocate network for instance. [ 651.843191] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 82dbfb03a19448ef83f59a519e79124b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 651.886601] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82dbfb03a19448ef83f59a519e79124b [ 651.919198] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375286, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487923} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.919497] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] 109fc11e-d640-4617-99a3-0defe0a5aa6c/109fc11e-d640-4617-99a3-0defe0a5aa6c.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 651.919726] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 651.920270] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c10e69cf-a20a-478d-8755-46e06bc289b4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.926117] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 651.926117] env[61273]: value = "task-375287" [ 651.926117] env[61273]: _type = "Task" [ 651.926117] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.933192] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375287, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.127562] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.128224] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Start building networks asynchronously for instance. 
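[annotation] The target size of 1048576 in the "Extending root virtual disk" step above is in KiB: ExtendVirtualDisk_Task takes its new capacity as newCapacityKb, and 1048576 KiB is exactly a 1 GiB root disk (root_gb=1, as in the m1.nano flavor dumped further down in this log). A trivial sanity check of that conversion:

    # 1 GiB root disk expressed in KiB, matching the 1048576 in the log.
    root_gb = 1
    new_capacity_kb = root_gb * 1024 * 1024
    assert new_capacity_kb == 1048576

[log resumes]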
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 652.130089] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 04f43f5705b44fb49181c2c882208a16 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 652.131229] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.587s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.133392] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 942349b7cafb41749bbb17cfce0377ae in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 652.164797] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04f43f5705b44fb49181c2c882208a16 [ 652.175232] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 942349b7cafb41749bbb17cfce0377ae [ 652.348143] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 3c41bcd095ee498780302098f53f3bc0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 652.380912] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c41bcd095ee498780302098f53f3bc0 [ 652.436604] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375287, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073822} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.437049] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 652.437904] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0341ad95-e4c5-4361-b08a-e5d0eaa7427c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.458592] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 109fc11e-d640-4617-99a3-0defe0a5aa6c/109fc11e-d640-4617-99a3-0defe0a5aa6c.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 652.458870] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c45d999b-e1f6-46fb-bc61-ea96b4d174f6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.478913] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 652.478913] env[61273]: value = "task-375288" [ 652.478913] env[61273]: _type = "Task" [ 652.478913] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.486679] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375288, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.635813] env[61273]: DEBUG nova.compute.utils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 652.636496] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 63dbaa333c9b40fa9b82dff55614431b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 652.637751] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 652.637948] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 652.648165] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63dbaa333c9b40fa9b82dff55614431b [ 652.694065] env[61273]: DEBUG nova.policy [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ceb5d8ab82344fe9baf8b970bf3a475a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7f053eccf8842afb90d2bece96f9311', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 652.865748] env[61273]: INFO nova.scheduler.client.report [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Deleted allocations for instance 32b57d1b-d35f-488e-be23-9119f2f56562 [ 652.874493] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Expecting reply to msg 996987fa460e4b2e935c340fa608fe32 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 652.887161] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 996987fa460e4b2e935c340fa608fe32 [ 652.983965] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8cba8c-2df8-4869-92e8-effbcd500f0c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.993264] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375288, 'name': ReconfigVM_Task, 'duration_secs': 0.268278} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.994914] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 109fc11e-d640-4617-99a3-0defe0a5aa6c/109fc11e-d640-4617-99a3-0defe0a5aa6c.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 652.995574] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78f9849a-ddca-42a8-8668-172ed07f0c6e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.998703] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1326d36f-16d0-41b8-93ae-3bf13dc8bea6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.005860] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 653.005860] env[61273]: value = "task-375289" [ 653.005860] env[61273]: _type = "Task" [ 653.005860] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.035368] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80dba925-ba5a-43e0-8c9f-30721991963f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.043780] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375289, 'name': Rename_Task} progress is 14%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.047100] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbab96c-821a-4138-92a6-df3e8299a5bc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.060707] env[61273]: DEBUG nova.compute.provider_tree [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.061224] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg f95bb54175764345b6a8ad0766ef23fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 653.068595] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f95bb54175764345b6a8ad0766ef23fb [ 653.141960] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 653.144255] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 596a8d85d6344b79ba39d7af04b8b4ed in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 653.183322] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 596a8d85d6344b79ba39d7af04b8b4ed [ 653.184495] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Successfully created port: 7dd0ca68-2bda-4db1-8681-659736235fa8 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.379763] env[61273]: DEBUG oslo_concurrency.lockutils [None req-440251be-ae2c-4a35-a60c-febed580cfec tempest-ServerTagsTestJSON-148504296 tempest-ServerTagsTestJSON-148504296-project-member] Lock "32b57d1b-d35f-488e-be23-9119f2f56562" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.047s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.381283] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 9028b44684e74a809a6a08b25fbf9cd0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 653.394354] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9028b44684e74a809a6a08b25fbf9cd0 [ 653.539822] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': 
task-375289, 'name': Rename_Task, 'duration_secs': 0.137344} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.540123] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 653.540361] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b10f048-d6ee-47e9-a3a9-13e3cdd04191 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.547118] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 653.547118] env[61273]: value = "task-375290" [ 653.547118] env[61273]: _type = "Task" [ 653.547118] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.555087] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.564062] env[61273]: DEBUG nova.scheduler.client.report [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 653.567093] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 65b0f9b4c290493f9612801cc229eae6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 653.585384] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65b0f9b4c290493f9612801cc229eae6 [ 653.649510] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg d08bf7930efb474eb8f0a640466014b5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 653.688421] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d08bf7930efb474eb8f0a640466014b5 [ 653.883197] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Starting instance... 
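[annotation] The inventory payload repeated above is what Placement uses to size provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb: usable capacity per resource class is roughly (total - reserved) * allocation_ratio, with any single request additionally capped by max_unit (here 16 VCPUs per instance). A small check against the numbers in the log, assuming that capacity formula:

    # Effective capacity implied by the inventory dict above (sketch).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 141},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

[log resumes]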
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 653.885184] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 35f501e126b0470b81d5c7c115dcdbaa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 653.919591] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35f501e126b0470b81d5c7c115dcdbaa [ 654.057247] env[61273]: DEBUG oslo_vmware.api [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375290, 'name': PowerOnVM_Task, 'duration_secs': 0.389014} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.057374] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 654.057604] env[61273]: INFO nova.compute.manager [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Took 4.32 seconds to spawn the instance on the hypervisor. [ 654.057788] env[61273]: DEBUG nova.compute.manager [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 654.058588] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5999b9f7-3adb-4319-bb3d-c7d19e07d761 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.066804] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 49be76b518b14bd6addc697e7a1aee62 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 654.069973] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.939s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.070782] env[61273]: ERROR nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b6e7038c-dfd6-439e-846f-4aba6ca1318b, please check neutron logs for more information. 
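[annotation] With the power-on above, the full vmwareapi spawn sequence for instance 109fc11e-d640-4617-99a3-0defe0a5aa6c is visible in this section. As a commented recap (a reading aid compiled from the log lines, not Nova source), the vCenter tasks ran in this order:

    # vCenter task sequence observed for 109fc11e-... (durations from the log):
    #
    # 1. SearchDatastore_Task    - locate the cached image VMDK (~0.008s)
    # 2. CopyVirtualDisk_Task    - copy cache VMDK into the instance dir (~0.49s)
    # 3. ExtendVirtualDisk_Task  - grow the root disk to 1048576 KiB (~0.07s)
    # 4. ReconfigVM_Task         - attach the copied VMDK to the VM (~0.27s)
    # 5. Rename_Task             - rename the VM (~0.14s)
    # 6. PowerOnVM_Task          - power the instance on (~0.39s)
    #
    # Total: "Took 4.32 seconds to spawn the instance on the hypervisor."

[log resumes]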
[ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Traceback (most recent call last): [ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self.driver.spawn(context, instance, image_meta, [ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] vm_ref = self.build_virtual_machine(instance, [ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] vif_infos = vmwarevif.get_vif_info(self._session, [ 654.070782] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] for vif in network_info: [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] return self._sync_wrapper(fn, *args, **kwargs) [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self.wait() [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self[:] = self._gt.wait() [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] return self._exit_event.wait() [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] result = hub.switch() [ 654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
654.071094] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] return self.greenlet.switch() [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] result = function(*args, **kwargs) [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] return func(*args, **kwargs) [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] raise e [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] nwinfo = self.network_api.allocate_for_instance( [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] created_port_ids = self._update_ports_for_instance( [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] with excutils.save_and_reraise_exception(): [ 654.071434] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] self.force_reraise() [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] raise self.value [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] updated_port = self._update_port( [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] _ensure_no_port_binding_failure(port) [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] raise exception.PortBindingFailed(port_id=port['id']) [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] nova.exception.PortBindingFailed: Binding failed for port b6e7038c-dfd6-439e-846f-4aba6ca1318b, please check neutron logs for more information. [ 654.071851] env[61273]: ERROR nova.compute.manager [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] [ 654.072276] env[61273]: DEBUG nova.compute.utils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Binding failed for port b6e7038c-dfd6-439e-846f-4aba6ca1318b, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 654.073430] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.315s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.075431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 4af0cfcfc4314910b75bd88e7c7b15d6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 654.076690] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Build of instance d4d3db12-8de6-4daf-a087-89bb043d1217 was re-scheduled: Binding failed for port b6e7038c-dfd6-439e-846f-4aba6ca1318b, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 654.077188] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 654.077471] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquiring lock "refresh_cache-d4d3db12-8de6-4daf-a087-89bb043d1217" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.078844] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Acquired lock "refresh_cache-d4d3db12-8de6-4daf-a087-89bb043d1217" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.079101] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 654.079596] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 0be6e3cf105147aab2b58c26208007d8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 654.093666] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0be6e3cf105147aab2b58c26208007d8 [ 654.111083] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4af0cfcfc4314910b75bd88e7c7b15d6 [ 654.112287] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49be76b518b14bd6addc697e7a1aee62 [ 654.152597] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 654.177472] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 654.177831] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 654.178773] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.178773] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 654.178773] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.178773] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 654.178944] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 654.179114] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 654.179337] env[61273]: DEBUG nova.virt.hardware [None 
req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 654.179583] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 654.179831] env[61273]: DEBUG nova.virt.hardware [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 654.181384] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e9236e-66ef-4e44-ba8b-23655da026f9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.198166] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1758e6b-84b0-4f82-b44b-5677cc6d0974 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.304723] env[61273]: DEBUG nova.compute.manager [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Received event network-changed-7dd0ca68-2bda-4db1-8681-659736235fa8 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 654.304918] env[61273]: DEBUG nova.compute.manager [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Refreshing instance network info cache due to event network-changed-7dd0ca68-2bda-4db1-8681-659736235fa8. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 654.305128] env[61273]: DEBUG oslo_concurrency.lockutils [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] Acquiring lock "refresh_cache-b41c6d21-5e7f-427f-95ce-830fe0da8bc6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.305287] env[61273]: DEBUG oslo_concurrency.lockutils [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] Acquired lock "refresh_cache-b41c6d21-5e7f-427f-95ce-830fe0da8bc6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.305419] env[61273]: DEBUG nova.network.neutron [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Refreshing network info cache for port 7dd0ca68-2bda-4db1-8681-659736235fa8 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 654.305847] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] Expecting reply to msg 93757ed45f7b4f6e9ccdfbab5dc75ca1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 654.314056] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93757ed45f7b4f6e9ccdfbab5dc75ca1 [ 654.369890] env[61273]: ERROR nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7dd0ca68-2bda-4db1-8681-659736235fa8, please check neutron logs for more information. 
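[annotation] Both build failures in this section terminate in _ensure_no_port_binding_failure (nova/network/neutron.py line 294 in the tracebacks): after Neutron returns the updated port, Nova inspects binding:vif_type and raises PortBindingFailed if Neutron reported a failed binding. A simplified, self-contained sketch of that check, paraphrased from the tracebacks rather than copied from the Nova tree:

    # Simplified sketch of the check the tracebacks end in.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs for '
                'more information.' % port_id)
            self.port_id = port_id

    def _ensure_no_port_binding_failure(port):
        # Neutron marks an unbindable port with binding:vif_type
        # 'binding_failed'; Nova turns that into PortBindingFailed.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

[log resumes]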
[ 654.369890] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 654.369890] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.369890] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 654.369890] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 654.369890] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 654.369890] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 654.369890] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 654.369890] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.369890] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 654.369890] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.369890] env[61273]: ERROR nova.compute.manager raise self.value [ 654.369890] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 654.369890] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 654.369890] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.369890] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 654.370360] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.370360] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 654.370360] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7dd0ca68-2bda-4db1-8681-659736235fa8, please check neutron logs for more information. 
[ 654.370360] env[61273]: ERROR nova.compute.manager [ 654.370360] env[61273]: Traceback (most recent call last): [ 654.370360] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 654.370360] env[61273]: listener.cb(fileno) [ 654.370360] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.370360] env[61273]: result = function(*args, **kwargs) [ 654.370360] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 654.370360] env[61273]: return func(*args, **kwargs) [ 654.370360] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.370360] env[61273]: raise e [ 654.370360] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.370360] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 654.370360] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 654.370360] env[61273]: created_port_ids = self._update_ports_for_instance( [ 654.370360] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 654.370360] env[61273]: with excutils.save_and_reraise_exception(): [ 654.370360] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.370360] env[61273]: self.force_reraise() [ 654.370360] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.370360] env[61273]: raise self.value [ 654.370360] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 654.370360] env[61273]: updated_port = self._update_port( [ 654.370360] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.370360] env[61273]: _ensure_no_port_binding_failure(port) [ 654.370360] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.370360] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 654.371153] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 7dd0ca68-2bda-4db1-8681-659736235fa8, please check neutron logs for more information. [ 654.371153] env[61273]: Removing descriptor: 19 [ 654.373817] env[61273]: ERROR nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7dd0ca68-2bda-4db1-8681-659736235fa8, please check neutron logs for more information. 
[ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Traceback (most recent call last): [ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] yield resources [ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self.driver.spawn(context, instance, image_meta, [ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] vm_ref = self.build_virtual_machine(instance, [ 654.373817] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] vif_infos = vmwarevif.get_vif_info(self._session, [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] for vif in network_info: [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] return self._sync_wrapper(fn, *args, **kwargs) [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self.wait() [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self[:] = self._gt.wait() [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] return self._exit_event.wait() [ 654.374192] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.374192] env[61273]: ERROR 
nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] result = hub.switch() [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] return self.greenlet.switch() [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] result = function(*args, **kwargs) [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] return func(*args, **kwargs) [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] raise e [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] nwinfo = self.network_api.allocate_for_instance( [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] created_port_ids = self._update_ports_for_instance( [ 654.374538] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] with excutils.save_and_reraise_exception(): [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self.force_reraise() [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] raise self.value [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] updated_port = self._update_port( [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 654.374948] 
env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] _ensure_no_port_binding_failure(port) [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] raise exception.PortBindingFailed(port_id=port['id']) [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] nova.exception.PortBindingFailed: Binding failed for port 7dd0ca68-2bda-4db1-8681-659736235fa8, please check neutron logs for more information. [ 654.374948] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] [ 654.375303] env[61273]: INFO nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Terminating instance [ 654.375303] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Acquiring lock "refresh_cache-b41c6d21-5e7f-427f-95ce-830fe0da8bc6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.407289] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.576805] env[61273]: INFO nova.compute.manager [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Took 25.99 seconds to build instance. [ 654.577869] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg e6255bfe8803452eb097e9f2df6f5f1e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 654.591091] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6255bfe8803452eb097e9f2df6f5f1e [ 654.607474] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 654.664925] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.665787] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 01e276dc0d864933a27c806ba86e3b55 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 654.675981] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01e276dc0d864933a27c806ba86e3b55 [ 654.824350] env[61273]: DEBUG nova.network.neutron [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 654.897321] env[61273]: DEBUG nova.network.neutron [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.897846] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] Expecting reply to msg 5a847e7144ed4bb797c836048ee534c6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 654.910178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a847e7144ed4bb797c836048ee534c6 [ 655.049293] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3d7457-fb45-4dbf-87de-3a457ed564cf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.057038] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d61f26-cd37-49a0-b8cd-901f497e8a7a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.086917] env[61273]: DEBUG oslo_concurrency.lockutils [None req-20b84c65-f886-4d64-81dc-131def0d4b19 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "109fc11e-d640-4617-99a3-0defe0a5aa6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.356s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.087786] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg bb83c584ee5d4cb6b4a37f83846b5a7d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.089229] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3ae8b3-4213-44d2-89e0-e9d91f461f98 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.097685] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307c66dd-967b-4be4-a4c6-b75278792f91 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.103628] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb83c584ee5d4cb6b4a37f83846b5a7d [ 655.111941] env[61273]: DEBUG nova.compute.provider_tree [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.113154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 631d0dcf50924c52992cdcba5e3a2376 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.124961] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 631d0dcf50924c52992cdcba5e3a2376 [ 655.150151] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 3e760b97d4864fe9b8f654da92e0ba11 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.162214] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e760b97d4864fe9b8f654da92e0ba11 [ 655.173054] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Releasing lock "refresh_cache-d4d3db12-8de6-4daf-a087-89bb043d1217" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.173291] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 655.173450] env[61273]: DEBUG nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 655.173609] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 655.314085] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 655.314701] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 39e24e13bd8342dfb1e8f6a11d561c5c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.327481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39e24e13bd8342dfb1e8f6a11d561c5c [ 655.400210] env[61273]: DEBUG oslo_concurrency.lockutils [req-690a77ca-227a-40c1-b314-b7107fed0ff1 req-3512de38-1cf3-4d94-8d5d-7530cbda5dbe service nova] Releasing lock "refresh_cache-b41c6d21-5e7f-427f-95ce-830fe0da8bc6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.400631] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Acquired lock "refresh_cache-b41c6d21-5e7f-427f-95ce-830fe0da8bc6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.400824] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 655.401262] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg b413b41694cc46a1b6e118ba581db351 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.408553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b413b41694cc46a1b6e118ba581db351 [ 655.461050] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg f138e4331d57412dbe1e6e08cd81c962 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.474373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f138e4331d57412dbe1e6e08cd81c962 [ 655.592789] env[61273]: DEBUG nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 655.594498] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 52e5ed9dc6f847c69953c50d0b400331 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.614835] env[61273]: DEBUG nova.scheduler.client.report [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 655.617299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg bc3e629826db4b38bc2a4635caea6308 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.633507] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc3e629826db4b38bc2a4635caea6308 [ 655.635678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52e5ed9dc6f847c69953c50d0b400331 [ 655.652347] env[61273]: DEBUG nova.compute.manager [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 655.655413] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6581a8e2-9721-4809-b1fd-2996842c0989 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.662944] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 58f2215346dc4411a8719b4974791615 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.687082] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58f2215346dc4411a8719b4974791615 [ 655.817023] env[61273]: DEBUG nova.network.neutron [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.822100] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg f841c1b032754031b0121099e497a1af in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.831494] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f841c1b032754031b0121099e497a1af [ 655.927652] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 655.964206] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquiring lock "109fc11e-d640-4617-99a3-0defe0a5aa6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.964537] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "109fc11e-d640-4617-99a3-0defe0a5aa6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.964764] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquiring lock "109fc11e-d640-4617-99a3-0defe0a5aa6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.964948] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "109fc11e-d640-4617-99a3-0defe0a5aa6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.965112] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "109fc11e-d640-4617-99a3-0defe0a5aa6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.968140] env[61273]: INFO nova.compute.manager [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Terminating instance [ 655.973759] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquiring lock "refresh_cache-109fc11e-d640-4617-99a3-0defe0a5aa6c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.974858] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab 
tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquired lock "refresh_cache-109fc11e-d640-4617-99a3-0defe0a5aa6c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.974858] env[61273]: DEBUG nova.network.neutron [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 655.975262] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 8c74b79f566347f7b80bb01fdc405be7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 655.990931] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c74b79f566347f7b80bb01fdc405be7 [ 656.036534] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.037333] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 379cb9b930734bd2a41f0d85458213a9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.047813] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 379cb9b930734bd2a41f0d85458213a9 [ 656.120239] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.047s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.121218] env[61273]: ERROR nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c0184233-b80c-46b2-a4e8-f4582c2edc01, please check neutron logs for more information. 
[ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Traceback (most recent call last): [ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self.driver.spawn(context, instance, image_meta, [ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] vm_ref = self.build_virtual_machine(instance, [ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] vif_infos = vmwarevif.get_vif_info(self._session, [ 656.121218] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] for vif in network_info: [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] return self._sync_wrapper(fn, *args, **kwargs) [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self.wait() [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self[:] = self._gt.wait() [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] return self._exit_event.wait() [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] result = hub.switch() [ 656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
656.121655] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] return self.greenlet.switch() [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] result = function(*args, **kwargs) [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] return func(*args, **kwargs) [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] raise e [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] nwinfo = self.network_api.allocate_for_instance( [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] created_port_ids = self._update_ports_for_instance( [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] with excutils.save_and_reraise_exception(): [ 656.122050] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] self.force_reraise() [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] raise self.value [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] updated_port = self._update_port( [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] _ensure_no_port_binding_failure(port) [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] raise exception.PortBindingFailed(port_id=port['id']) [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] nova.exception.PortBindingFailed: Binding failed for port c0184233-b80c-46b2-a4e8-f4582c2edc01, please check neutron logs for more information. [ 656.122436] env[61273]: ERROR nova.compute.manager [instance: 13c1d417-4087-46ad-b513-fc3317995d18] [ 656.123677] env[61273]: DEBUG nova.compute.utils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Binding failed for port c0184233-b80c-46b2-a4e8-f4582c2edc01, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 656.125113] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.901s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.127759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg f2b7a6a00d3f45d0a09e1396d4b3cd2e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.129507] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Build of instance 13c1d417-4087-46ad-b513-fc3317995d18 was re-scheduled: Binding failed for port c0184233-b80c-46b2-a4e8-f4582c2edc01, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 656.130199] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 656.130617] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Acquiring lock "refresh_cache-13c1d417-4087-46ad-b513-fc3317995d18" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.138835] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Acquired lock "refresh_cache-13c1d417-4087-46ad-b513-fc3317995d18" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.139247] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 656.139827] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 2eb68e7f9fad4e31892c4be66b0373fd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.147144] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2eb68e7f9fad4e31892c4be66b0373fd [ 656.167578] env[61273]: INFO nova.compute.manager [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] instance snapshotting [ 656.169431] env[61273]: DEBUG nova.objects.instance [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lazy-loading 'flavor' on Instance uuid 109fc11e-d640-4617-99a3-0defe0a5aa6c {{(pid=61273) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 656.171882] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg be0aef84f51e4273b24d7ba02d1c8235 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.176504] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2b7a6a00d3f45d0a09e1396d4b3cd2e [ 656.201440] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.204561] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
be0aef84f51e4273b24d7ba02d1c8235 [ 656.325960] env[61273]: INFO nova.compute.manager [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] [instance: d4d3db12-8de6-4daf-a087-89bb043d1217] Took 1.15 seconds to deallocate network for instance. [ 656.328436] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg 64b6d794dfaf442e9cdb4c8024bd1435 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.332947] env[61273]: DEBUG nova.compute.manager [req-8efd8310-532c-4c30-b583-4bb9c4cb63f3 req-00a15425-6ad5-4140-ab93-f7d9498c0794 service nova] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Received event network-vif-deleted-7dd0ca68-2bda-4db1-8681-659736235fa8 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 656.364379] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64b6d794dfaf442e9cdb4c8024bd1435 [ 656.510191] env[61273]: DEBUG nova.network.neutron [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 656.539209] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Releasing lock "refresh_cache-b41c6d21-5e7f-427f-95ce-830fe0da8bc6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.540293] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 656.540680] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 656.541168] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bcb14171-0313-4bf0-a10a-92fdd07f3ee3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.550702] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494135fa-8b10-40bb-85c8-7c1977b36cb0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.575724] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b41c6d21-5e7f-427f-95ce-830fe0da8bc6 could not be found. 
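
(Editor's note) The WARNING just above shows the teardown path tolerating a VM that never reached the hypervisor: the build for b41c6d21 failed at port binding, so the vCenter lookup finds nothing and the driver carries on rather than failing the delete. Below is a minimal sketch of that best-effort destroy pattern in plain Python; the function and parameter names (`find_vm_ref`, `unregister_vm`, `deallocate_network`, the local `InstanceNotFound`) are illustrative placeholders and not the actual nova.virt.vmwareapi.vmops code.

```python
import logging

logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound (illustrative only)."""


def destroy_instance(instance_uuid, find_vm_ref, unregister_vm, deallocate_network):
    """Best-effort teardown: a VM missing on the backend is not fatal.

    ``find_vm_ref``, ``unregister_vm`` and ``deallocate_network`` are
    hypothetical callables standing in for the driver and network layers.
    """
    try:
        vm_ref = find_vm_ref(instance_uuid)
        if vm_ref is None:
            raise InstanceNotFound(instance_uuid)
        unregister_vm(vm_ref)
    except InstanceNotFound:
        # The build failed before a VM was ever created (e.g. port binding
        # failed), so there is nothing to remove on the hypervisor side.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    # Network cleanup must still run regardless of backend state, which is
    # why the log shows deallocate_for_instance() right after the warning.
    deallocate_network(instance_uuid)


# Example: simulate the situation recorded in the log above.
destroy_instance(
    "b41c6d21-5e7f-427f-95ce-830fe0da8bc6",
    find_vm_ref=lambda uuid: None,          # backend has no such VM
    unregister_vm=lambda ref: None,
    deallocate_network=lambda uuid: LOG.info("deallocating network for %s", uuid),
)
```
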
[ 656.575959] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 656.576855] env[61273]: INFO nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 656.577169] env[61273]: DEBUG oslo.service.loopingcall [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.578094] env[61273]: DEBUG nova.network.neutron [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.578593] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg b7a395b286f6495f86f519d1ccb95adb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.579495] env[61273]: DEBUG nova.compute.manager [-] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 656.579549] env[61273]: DEBUG nova.network.neutron [-] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 656.593004] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7a395b286f6495f86f519d1ccb95adb [ 656.616416] env[61273]: DEBUG nova.network.neutron [-] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 656.616938] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4689b55d5b354e42800bc90b32869e65 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.623851] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4689b55d5b354e42800bc90b32869e65 [ 656.668829] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 656.680038] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f7f2b9-5343-460e-a715-939e40760e87 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.710305] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ca51ee-0d17-4525-a7eb-521a1d296287 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.719289] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg f574c5866ad14bf9b605a312d1ba9930 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.771924] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f574c5866ad14bf9b605a312d1ba9930 [ 656.800166] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.800699] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 3b6d43d7b5a04036a56194bf5f3fae64 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.814567] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b6d43d7b5a04036a56194bf5f3fae64 [ 656.833175] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg ea026dbc79644b1c9f0fe548f126a58d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 656.866432] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea026dbc79644b1c9f0fe548f126a58d [ 657.038949] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dba44a-22d6-4313-a884-2cc39f919f69 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.046427] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985df30d-6879-4061-868d-901d7d989014 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.076871] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ea475c-dd81-4a71-b156-c78967679867 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.083442] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Releasing lock "refresh_cache-109fc11e-d640-4617-99a3-0defe0a5aa6c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
657.083872] env[61273]: DEBUG nova.compute.manager [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 657.084086] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 657.084838] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408c144f-0730-4c0d-bc67-03a30a5f75f0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.088525] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d038ad-3c10-4ae9-855f-36187f78bc99 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.096636] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 657.104179] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a9efdbe-f23d-45ff-b6fe-561990e3dfce {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.105872] env[61273]: DEBUG nova.compute.provider_tree [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.106349] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 88b19e84462e4d2294287b3f3952de00 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 657.113217] env[61273]: DEBUG oslo_vmware.api [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 657.113217] env[61273]: value = "task-375291" [ 657.113217] env[61273]: _type = "Task" [ 657.113217] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.118217] env[61273]: DEBUG nova.network.neutron [-] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.118615] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ecc12366d4654b9e8135d32e244d69d6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 657.119681] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88b19e84462e4d2294287b3f3952de00 [ 657.123048] env[61273]: DEBUG oslo_vmware.api [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375291, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.126828] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecc12366d4654b9e8135d32e244d69d6 [ 657.150083] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.150430] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.227663] env[61273]: DEBUG nova.compute.manager [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Instance disappeared during snapshot {{(pid=61273) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 657.304011] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Releasing lock "refresh_cache-13c1d417-4087-46ad-b513-fc3317995d18" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.304255] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 657.304435] env[61273]: DEBUG nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 657.304601] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 657.319605] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 657.320202] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg 1975ed1dfb0d4e79bc8c45f42c495079 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 657.327828] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1975ed1dfb0d4e79bc8c45f42c495079 [ 657.353916] env[61273]: INFO nova.scheduler.client.report [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Deleted allocations for instance d4d3db12-8de6-4daf-a087-89bb043d1217 [ 657.359604] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Expecting reply to msg a9caaecd39e846eca1c51f0c1eade250 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 657.362024] env[61273]: DEBUG nova.compute.manager [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Found 0 images (rotation: 2) {{(pid=61273) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 657.362340] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8692978-2673-40c7-b83a-e134ecc55902 tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 84a9512545944c63acbaf851e74c1ef1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 657.369987] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9caaecd39e846eca1c51f0c1eade250 [ 657.382842] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84a9512545944c63acbaf851e74c1ef1 [ 657.612033] env[61273]: DEBUG nova.scheduler.client.report [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 657.614528] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 53a9ecece90646d882dd6f0e3fab8a4e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 657.624738] env[61273]: INFO nova.compute.manager [-] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Took 1.05 seconds to deallocate network for instance. [ 657.625033] env[61273]: DEBUG oslo_vmware.api [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375291, 'name': PowerOffVM_Task, 'duration_secs': 0.119289} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.626702] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 657.626904] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 657.627415] env[61273]: DEBUG nova.compute.claims [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 657.627585] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.627820] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91bbd911-e003-4a6a-9f1b-8f440667bdea {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.632802] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53a9ecece90646d882dd6f0e3fab8a4e [ 657.653421] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 657.653641] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab 
tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Deleting contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 657.653821] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Deleting the datastore file [datastore1] 109fc11e-d640-4617-99a3-0defe0a5aa6c {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 657.654130] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20bf388d-3267-4798-abb8-16d59d87a7aa {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.660062] env[61273]: DEBUG oslo_vmware.api [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for the task: (returnval){ [ 657.660062] env[61273]: value = "task-375293" [ 657.660062] env[61273]: _type = "Task" [ 657.660062] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.668335] env[61273]: DEBUG oslo_vmware.api [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375293, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.823278] env[61273]: DEBUG nova.network.neutron [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.823821] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg d7023a99035b496a96d95badb71972ad in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 657.832416] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7023a99035b496a96d95badb71972ad [ 657.861800] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b6c5538-9fc0-45d5-a776-c5974fa8862a tempest-DeleteServersAdminTestJSON-581952752 tempest-DeleteServersAdminTestJSON-581952752-project-member] Lock "d4d3db12-8de6-4daf-a087-89bb043d1217" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.206s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.862381] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 545b3e884951414f94cdd26a80f2f8d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 657.873149] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 545b3e884951414f94cdd26a80f2f8d1 [ 658.120513] env[61273]: DEBUG 
oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.995s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.121151] env[61273]: ERROR nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 90378eb2-b59a-48b8-a577-33399d56d5c9, please check neutron logs for more information. [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Traceback (most recent call last): [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self.driver.spawn(context, instance, image_meta, [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] vm_ref = self.build_virtual_machine(instance, [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] vif_infos = vmwarevif.get_vif_info(self._session, [ 658.121151] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] for vif in network_info: [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] return self._sync_wrapper(fn, *args, **kwargs) [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self.wait() [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self[:] = 
self._gt.wait() [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] return self._exit_event.wait() [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] current.throw(*self._exc) [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 658.121466] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] result = function(*args, **kwargs) [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] return func(*args, **kwargs) [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] raise e [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] nwinfo = self.network_api.allocate_for_instance( [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] created_port_ids = self._update_ports_for_instance( [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] with excutils.save_and_reraise_exception(): [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] self.force_reraise() [ 658.121858] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.122208] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] raise self.value [ 658.122208] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.122208] env[61273]: ERROR nova.compute.manager [instance: 
782dc85a-56f4-4f03-8711-b78bbadb33ce] updated_port = self._update_port( [ 658.122208] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.122208] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] _ensure_no_port_binding_failure(port) [ 658.122208] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.122208] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] raise exception.PortBindingFailed(port_id=port['id']) [ 658.122208] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] nova.exception.PortBindingFailed: Binding failed for port 90378eb2-b59a-48b8-a577-33399d56d5c9, please check neutron logs for more information. [ 658.122208] env[61273]: ERROR nova.compute.manager [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] [ 658.122208] env[61273]: DEBUG nova.compute.utils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Binding failed for port 90378eb2-b59a-48b8-a577-33399d56d5c9, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 658.123773] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Build of instance 782dc85a-56f4-4f03-8711-b78bbadb33ce was re-scheduled: Binding failed for port 90378eb2-b59a-48b8-a577-33399d56d5c9, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 658.124203] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 658.124426] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Acquiring lock "refresh_cache-782dc85a-56f4-4f03-8711-b78bbadb33ce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.124571] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Acquired lock "refresh_cache-782dc85a-56f4-4f03-8711-b78bbadb33ce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.124726] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 658.125184] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 69c5ad7d9fde4d1e8655639cb22b86cb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 658.125889] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.161s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.126650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 22e65e51a817474784274b9b1c47871a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 658.135809] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69c5ad7d9fde4d1e8655639cb22b86cb [ 658.151013] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22e65e51a817474784274b9b1c47871a [ 658.171481] env[61273]: DEBUG oslo_vmware.api [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Task: {'id': task-375293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123612} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.171796] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 658.171972] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Deleted contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 658.172157] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 658.172322] env[61273]: INFO nova.compute.manager [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Took 1.09 seconds to destroy the instance on the hypervisor. [ 658.172571] env[61273]: DEBUG oslo.service.loopingcall [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.172740] env[61273]: DEBUG nova.compute.manager [-] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 658.172831] env[61273]: DEBUG nova.network.neutron [-] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 658.193606] env[61273]: DEBUG nova.network.neutron [-] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.194132] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 978cee0b760b4835ba3060305df15038 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 658.204377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 978cee0b760b4835ba3060305df15038 [ 658.327239] env[61273]: INFO nova.compute.manager [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] [instance: 13c1d417-4087-46ad-b513-fc3317995d18] Took 1.02 seconds to deallocate network for instance. 
[ 658.328316] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg bd0425e1a9234f9eb39e846496dc88af in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 658.363487] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd0425e1a9234f9eb39e846496dc88af [ 658.364225] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 658.365962] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg a1b311f27bc545b48b4356403cf74e24 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 658.402226] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1b311f27bc545b48b4356403cf74e24 [ 658.634170] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 29d7b37871484981be4944f5488deb63 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 658.651802] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29d7b37871484981be4944f5488deb63 [ 658.696998] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.699352] env[61273]: DEBUG nova.network.neutron [-] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.699624] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 35550a8eb08e43e58969b990de2c3978 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 658.708294] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35550a8eb08e43e58969b990de2c3978 [ 658.833496] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg bbf051b39ae04bfeb209a05d115f168e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 658.856782] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.856890] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 3df52abe0d7d49268db99f053cd9cfa1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 658.865534] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3df52abe0d7d49268db99f053cd9cfa1 [ 658.874723] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbf051b39ae04bfeb209a05d115f168e [ 658.892706] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.160236] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 343f5d7e08e04ae6824807b48293c81f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 659.167776] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 343f5d7e08e04ae6824807b48293c81f [ 659.201478] env[61273]: INFO nova.compute.manager [-] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Took 1.03 seconds to deallocate network for instance. 
[ 659.205366] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 57cc8e9f09c344db9c8c2c896d6d3ad1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 659.236197] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57cc8e9f09c344db9c8c2c896d6d3ad1 [ 659.357857] env[61273]: INFO nova.scheduler.client.report [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Deleted allocations for instance 13c1d417-4087-46ad-b513-fc3317995d18 [ 659.364288] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Releasing lock "refresh_cache-782dc85a-56f4-4f03-8711-b78bbadb33ce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.364520] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 659.364697] env[61273]: DEBUG nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 659.364861] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 659.366812] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Expecting reply to msg aa0aba82cfea404d826082255a754214 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 659.379413] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa0aba82cfea404d826082255a754214 [ 659.380925] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 659.381463] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 2517e8c57de34be88b332978847e03be in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 659.389748] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2517e8c57de34be88b332978847e03be [ 659.661847] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 13c1d417-4087-46ad-b513-fc3317995d18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.662424] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg d1f102c1f7154e4eb00e5b4dadd3f066 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 659.672167] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1f102c1f7154e4eb00e5b4dadd3f066 [ 659.708229] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.869312] env[61273]: DEBUG oslo_concurrency.lockutils [None req-8f1451f3-cc8e-49e0-a234-0e4f6bacfeea tempest-ServersTestJSON-1053663491 tempest-ServersTestJSON-1053663491-project-member] Lock "13c1d417-4087-46ad-b513-fc3317995d18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.128s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.869924] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 7ab5971882944a34802d743376990525 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 659.880755] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ab5971882944a34802d743376990525 [ 659.882881] env[61273]: DEBUG nova.network.neutron [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.883377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 4dfcc65e3835469094e0199317f66842 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 659.894235] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dfcc65e3835469094e0199317f66842 [ 660.165278] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 782dc85a-56f4-4f03-8711-b78bbadb33ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.165404] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1336becb-9691-490c-86ea-3bc70d13d7df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 660.165526] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 30ed4438-4f74-4bc3-a6cc-a59420751940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 660.165649] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 767d7956-954b-4be7-8cc6-45872ff4cfce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 660.165828] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 109fc11e-d640-4617-99a3-0defe0a5aa6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 660.165974] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance b41c6d21-5e7f-427f-95ce-830fe0da8bc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 660.166562] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg a4e0372c532b411a8febc52332e773cf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 660.182356] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4e0372c532b411a8febc52332e773cf [ 660.371652] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 660.373405] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 87a522fd97fa433191780197b921eb08 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 660.388304] env[61273]: INFO nova.compute.manager [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] [instance: 782dc85a-56f4-4f03-8711-b78bbadb33ce] Took 1.02 seconds to deallocate network for instance. [ 660.389934] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg d2bc4f3cb6404f609fd0822dc72b9a6b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 660.407967] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87a522fd97fa433191780197b921eb08 [ 660.443912] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2bc4f3cb6404f609fd0822dc72b9a6b [ 660.670553] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 12c47e99-faf4-4083-a46f-4e33c451e980 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.671161] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 5c3a93b5c51d44c49e75a011b1229986 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 660.682680] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c3a93b5c51d44c49e75a011b1229986 [ 660.894515] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg ea53cf1c9ecc4b6599f7e2fdd7140c58 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 660.896580] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.928981] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea53cf1c9ecc4b6599f7e2fdd7140c58 [ 661.174271] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 5ea287cd-ba85-446d-85d0-5a050fe49f17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.174833] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 147b2b4d97994d1b9be4bc83e393589a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 661.186245] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 147b2b4d97994d1b9be4bc83e393589a [ 661.416579] env[61273]: INFO nova.scheduler.client.report [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Deleted allocations for instance 782dc85a-56f4-4f03-8711-b78bbadb33ce [ 661.423940] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Expecting reply to msg 532f628804be44c69fe3656d61fe550c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 661.444972] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 532f628804be44c69fe3656d61fe550c [ 661.680328] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 297a5546-6159-462c-a436-032d94855c00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.680328] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 2a871a77853e48e0a044e070a511fa79 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 661.693925] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a871a77853e48e0a044e070a511fa79 [ 661.927641] env[61273]: DEBUG oslo_concurrency.lockutils [None req-cd1781c5-927a-467d-bd44-c41c08f12193 tempest-InstanceActionsNegativeTestJSON-2128650360 tempest-InstanceActionsNegativeTestJSON-2128650360-project-member] Lock "782dc85a-56f4-4f03-8711-b78bbadb33ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.295s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.927641] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 22f2f49b807c4106aab408ef24155557 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 661.941659] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22f2f49b807c4106aab408ef24155557 [ 662.184164] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 2b87dfbe-2b94-4787-a795-94f8b63f651c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.184164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 80dfc6d52a0547b8a658b7511226c6cf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 662.193893] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80dfc6d52a0547b8a658b7511226c6cf [ 662.432636] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 662.432636] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg a0b8075a825a435c8c62313187e0cbc4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 662.472039] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0b8075a825a435c8c62313187e0cbc4 [ 662.687707] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance e2560c8e-61c6-4343-82cb-47dc5b1997fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.687707] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 8c58b24f2f6d48c9a07984a548384ecb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 662.698378] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c58b24f2f6d48c9a07984a548384ecb [ 662.951291] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.189996] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 3635532a-2af3-4ef5-a922-37fc763c9708 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.190615] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 56ce20ce2e3a4575bf0597cdf943b290 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 663.206495] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56ce20ce2e3a4575bf0597cdf943b290 [ 663.694205] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 144c3c21-b18e-4997-a241-8ff21a3b4835 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.694799] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f8acf00313594caa8fbc0ada05e03418 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 663.705351] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8acf00313594caa8fbc0ada05e03418 [ 664.199509] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 8d63e0a8-85a1-400b-a6f0-8e87c7945655 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.200449] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 9d3eadf863e74f12beede848f9524dc4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 664.201562] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Acquiring lock "2cd160c6-98ac-44a7-831e-d0fa3a958b99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.201930] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Lock "2cd160c6-98ac-44a7-831e-d0fa3a958b99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.210466] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d3eadf863e74f12beede848f9524dc4 [ 664.704405] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance bf40cc8c-4729-49c5-8c9d-e3ee09606aa5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.704708] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f458f4cc4a564b37abc159274d7bc2e1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 664.715792] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f458f4cc4a564b37abc159274d7bc2e1 [ 665.212513] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 23774aa5-1608-495f-8015-29e25f856c69 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.212688] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 496dc387a24b40e89dcf882d10847a36 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 665.224377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 496dc387a24b40e89dcf882d10847a36 [ 665.714955] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance b6a158f8-6e2a-4967-ad05-761804ec6590 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.715574] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 451c0410cbd54c4da408429f14c7b954 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 665.726320] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 451c0410cbd54c4da408429f14c7b954 [ 666.217823] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance ca8a38c7-a81c-407a-9558-3d15e492d9fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.218421] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 5d86f4545c464e10a1fca812f2b01ef0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 666.228588] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d86f4545c464e10a1fca812f2b01ef0 [ 666.720920] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.721584] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 210bdf0a9894495688d87bb981cb4cc7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 666.740612] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 210bdf0a9894495688d87bb981cb4cc7 [ 667.224693] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 799d4a06-f7a3-4b92-8e96-ac076848fdd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.225317] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 8ea81faaa7fe4d358fd97fbcb5a9c419 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 667.236444] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ea81faaa7fe4d358fd97fbcb5a9c419 [ 667.728725] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 21213cff-55b3-48fd-91b4-6718f7819bc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.729318] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg bcd94904e3db4ee49abba9dc66851e12 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 667.744071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcd94904e3db4ee49abba9dc66851e12 [ 667.903764] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "82f77423-cee6-4a04-8463-cabe57cba9cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.903993] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "82f77423-cee6-4a04-8463-cabe57cba9cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.930100] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "a0a40c68-77e2-4152-ac2e-059f8f7a8f78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.930340] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "a0a40c68-77e2-4152-ac2e-059f8f7a8f78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.232238] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance f3df4816-ef02-4ecc-a8ca-4f0eaf286218 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.232919] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg a863b7a018b8411f965261052c7a3e25 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 668.247884] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a863b7a018b8411f965261052c7a3e25 [ 668.739874] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 2a7d4872-4ed7-4058-bc36-b199d89a9f14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.740530] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 3a9262c094784dcab438af82cde49b81 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 668.755172] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a9262c094784dcab438af82cde49b81 [ 669.243815] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance c376b161-74f9-405a-bb86-516583a9a76f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 669.244465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 2d920c79d63b42f2920e930812d8807d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 669.255351] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d920c79d63b42f2920e930812d8807d [ 669.747090] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 669.747432] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 669.747509] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 670.097605] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54149e9f-e3d6-4e71-b587-8fc9b09a5ec8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.105247] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7d2487-0e99-4125-a528-10dc04f9f287 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.135150] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd36947-b1ac-4f79-afb3-213f161dec4d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.141828] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2371e38-be22-48fd-aa27-6f092a3c6e3b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.154243] env[61273]: DEBUG nova.compute.provider_tree [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.154701] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 88b39bd790c241c4bec1105fdb901201 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 670.161919] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88b39bd790c241c4bec1105fdb901201 [ 670.657834] env[61273]: DEBUG nova.scheduler.client.report [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.660394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg a7a92146561444c986e401ef2c82de1b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 670.677404] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7a92146561444c986e401ef2c82de1b [ 671.162627] 
env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61273) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 671.162914] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 13.037s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.163190] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.282s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.165066] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 2d52aa77117046d187775c4f5fb71d6d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 671.201009] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d52aa77117046d187775c4f5fb71d6d [ 671.985995] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714153c3-f69e-4396-9393-247695f18d9a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.992107] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae599c9-ae8a-4037-b464-dfe37b6ebd8a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.024510] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a097597d-a0ed-4f84-ae8d-23b9fe766b10 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.031548] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bd315f-8dd2-4067-8f8a-322f6e0133c5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.047004] env[61273]: DEBUG nova.compute.provider_tree [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.047506] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 329dd6202cc04623a8c71401fb6517fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 672.054574] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 329dd6202cc04623a8c71401fb6517fc [ 672.550515] env[61273]: DEBUG nova.scheduler.client.report 
[None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.553041] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 8ba4ad75523f4a2199b48948ee519662 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 672.564545] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ba4ad75523f4a2199b48948ee519662 [ 673.055893] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.893s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.056578] env[61273]: ERROR nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 65fe8495-4b99-4083-8128-022b6c4de52b, please check neutron logs for more information. 
[ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Traceback (most recent call last): [ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self.driver.spawn(context, instance, image_meta, [ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] vm_ref = self.build_virtual_machine(instance, [ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] vif_infos = vmwarevif.get_vif_info(self._session, [ 673.056578] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] for vif in network_info: [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] return self._sync_wrapper(fn, *args, **kwargs) [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self.wait() [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self[:] = self._gt.wait() [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] return self._exit_event.wait() [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] result = hub.switch() [ 673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
673.056898] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] return self.greenlet.switch() [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] result = function(*args, **kwargs) [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] return func(*args, **kwargs) [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] raise e [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] nwinfo = self.network_api.allocate_for_instance( [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] created_port_ids = self._update_ports_for_instance( [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] with excutils.save_and_reraise_exception(): [ 673.057225] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] self.force_reraise() [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] raise self.value [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] updated_port = self._update_port( [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] _ensure_no_port_binding_failure(port) [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] raise exception.PortBindingFailed(port_id=port['id']) [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] nova.exception.PortBindingFailed: Binding failed for port 65fe8495-4b99-4083-8128-022b6c4de52b, please check neutron logs for more information. [ 673.057546] env[61273]: ERROR nova.compute.manager [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] [ 673.057895] env[61273]: DEBUG nova.compute.utils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Binding failed for port 65fe8495-4b99-4083-8128-022b6c4de52b, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 673.058565] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.881s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.060570] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 73ff3bbe57ed421fae196bb86661207d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 673.062478] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Build of instance 1336becb-9691-490c-86ea-3bc70d13d7df was re-scheduled: Binding failed for port 65fe8495-4b99-4083-8128-022b6c4de52b, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 673.065227] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 673.065227] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Acquiring lock "refresh_cache-1336becb-9691-490c-86ea-3bc70d13d7df" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.065227] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Acquired lock "refresh_cache-1336becb-9691-490c-86ea-3bc70d13d7df" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.065227] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 673.065227] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 8a97caec215343418710b87f9f078180 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 673.070109] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a97caec215343418710b87f9f078180 [ 673.094030] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73ff3bbe57ed421fae196bb86661207d [ 673.584882] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.649254] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.649874] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 7e3556c082a84757b8f82407485e6f6d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 673.661371] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e3556c082a84757b8f82407485e6f6d [ 673.898628] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bfe5d1-4d9e-461d-a196-5f8a16c4322c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.905890] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1f4aba-b5da-47f2-b64f-e427be4e3af6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.936691] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e575d3d8-3cbb-42bf-b349-70c8322a1062 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.945263] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b626a7-6a10-49d0-8055-3265ff451db8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.959826] env[61273]: DEBUG nova.compute.provider_tree [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.960336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg d58927ff53044287af972b3146e6acb1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 673.967175] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d58927ff53044287af972b3146e6acb1 [ 674.153264] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Releasing lock "refresh_cache-1336becb-9691-490c-86ea-3bc70d13d7df" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.153524] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if 
VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 674.153733] env[61273]: DEBUG nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 674.153933] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 674.187766] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.188408] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 6c266211e0ba4651bc847230f14fb2af in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 674.201047] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c266211e0ba4651bc847230f14fb2af [ 674.462683] env[61273]: DEBUG nova.scheduler.client.report [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.465088] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg f3fec18b0afe4e639b35feda3aa85a54 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 674.478079] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3fec18b0afe4e639b35feda3aa85a54 [ 674.695107] env[61273]: DEBUG nova.network.neutron [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.695644] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 
15cc0103514e433b829fd85c9cf0813e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 674.704145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15cc0103514e433b829fd85c9cf0813e [ 674.969253] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.911s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.969927] env[61273]: ERROR nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 17c48dca-cfb0-4cdc-9269-490a11efa464, please check neutron logs for more information. [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Traceback (most recent call last): [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self.driver.spawn(context, instance, image_meta, [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self._vmops.spawn(context, instance, image_meta, injected_files, [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] vm_ref = self.build_virtual_machine(instance, [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] vif_infos = vmwarevif.get_vif_info(self._session, [ 674.969927] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] for vif in network_info: [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] return self._sync_wrapper(fn, *args, **kwargs) [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self.wait() [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 
30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self[:] = self._gt.wait() [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] return self._exit_event.wait() [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] result = hub.switch() [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 674.970256] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] return self.greenlet.switch() [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] result = function(*args, **kwargs) [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] return func(*args, **kwargs) [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] raise e [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] nwinfo = self.network_api.allocate_for_instance( [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] created_port_ids = self._update_ports_for_instance( [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] with excutils.save_and_reraise_exception(): [ 674.970610] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] self.force_reraise() [ 674.970967] env[61273]: ERROR nova.compute.manager 
[instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] raise self.value [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] updated_port = self._update_port( [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] _ensure_no_port_binding_failure(port) [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] raise exception.PortBindingFailed(port_id=port['id']) [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] nova.exception.PortBindingFailed: Binding failed for port 17c48dca-cfb0-4cdc-9269-490a11efa464, please check neutron logs for more information. [ 674.970967] env[61273]: ERROR nova.compute.manager [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] [ 674.971292] env[61273]: DEBUG nova.compute.utils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Binding failed for port 17c48dca-cfb0-4cdc-9269-490a11efa464, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 674.971877] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.287s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.974061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 9e6012210f1e4992a77f869712b456a9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 674.975355] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Build of instance 30ed4438-4f74-4bc3-a6cc-a59420751940 was re-scheduled: Binding failed for port 17c48dca-cfb0-4cdc-9269-490a11efa464, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 674.978382] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 674.978624] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Acquiring lock "refresh_cache-30ed4438-4f74-4bc3-a6cc-a59420751940" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.978845] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Acquired lock "refresh_cache-30ed4438-4f74-4bc3-a6cc-a59420751940" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.978932] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 674.979333] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 5402118255574516b2f7c11857c07784 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 674.985805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5402118255574516b2f7c11857c07784 [ 675.008637] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e6012210f1e4992a77f869712b456a9 [ 675.200269] env[61273]: INFO nova.compute.manager [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] [instance: 1336becb-9691-490c-86ea-3bc70d13d7df] Took 1.04 seconds to deallocate network for instance. [ 675.200269] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg ae7033fe7a784f4984a9f3f1b62cf877 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 675.231364] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae7033fe7a784f4984a9f3f1b62cf877 [ 675.525371] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 675.623386] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.624564] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 66462a22c217452e9997cc43096e9845 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 675.633115] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66462a22c217452e9997cc43096e9845 [ 675.704460] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg 4b8cce0826024be1874ca960dbd15324 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 675.748543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b8cce0826024be1874ca960dbd15324 [ 675.900039] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f133289-28b5-4240-a6ab-e0a7dc21573b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.901965] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc97738-5a43-49ef-9792-3a76b3beb93d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.932746] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd0897e-5e11-450a-af16-d5980c28f82d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.940215] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e12283-abf3-4ba5-b6cc-a28310828ac4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.952927] env[61273]: DEBUG nova.compute.provider_tree [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.953520] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 0d3d9f43703e46cb89f34bd717999a7b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 675.961510] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d3d9f43703e46cb89f34bd717999a7b [ 676.128669] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Releasing lock 
"refresh_cache-30ed4438-4f74-4bc3-a6cc-a59420751940" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.128913] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 676.129095] env[61273]: DEBUG nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 676.129264] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 676.144643] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 676.145207] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg e61a3247d7d9410b9c173a3d7d97d9e0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 676.152186] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e61a3247d7d9410b9c173a3d7d97d9e0 [ 676.224910] env[61273]: INFO nova.scheduler.client.report [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Deleted allocations for instance 1336becb-9691-490c-86ea-3bc70d13d7df [ 676.231724] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Expecting reply to msg d90ad6780e694f2ca2de51d37d8d1b7f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 676.249363] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d90ad6780e694f2ca2de51d37d8d1b7f [ 676.456047] env[61273]: DEBUG nova.scheduler.client.report [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.458402] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 9dcb8e61bae44eaba79fbb1c89a267b7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 676.472399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dcb8e61bae44eaba79fbb1c89a267b7 [ 676.652460] env[61273]: DEBUG nova.network.neutron [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.653119] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 0a6e21575fc94909abce51b31c8f6043 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 676.662357] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a6e21575fc94909abce51b31c8f6043 [ 676.739803] env[61273]: DEBUG oslo_concurrency.lockutils [None req-26dddff7-42fe-47e4-843c-43822c25f550 tempest-ServersAdminNegativeTestJSON-285345347 tempest-ServersAdminNegativeTestJSON-285345347-project-member] Lock "1336becb-9691-490c-86ea-3bc70d13d7df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.566s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.740817] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 253cbf88bca240169395b645cce1a305 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 676.751252] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 253cbf88bca240169395b645cce1a305 [ 676.961359] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.989s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.962149] env[61273]: ERROR nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c, please check neutron logs for more information. 
[ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Traceback (most recent call last): [ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self.driver.spawn(context, instance, image_meta, [ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] vm_ref = self.build_virtual_machine(instance, [ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] vif_infos = vmwarevif.get_vif_info(self._session, [ 676.962149] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] for vif in network_info: [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] return self._sync_wrapper(fn, *args, **kwargs) [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self.wait() [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self[:] = self._gt.wait() [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] return self._exit_event.wait() [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] result = hub.switch() [ 676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
676.962548] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] return self.greenlet.switch() [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] result = function(*args, **kwargs) [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] return func(*args, **kwargs) [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] raise e [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] nwinfo = self.network_api.allocate_for_instance( [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] created_port_ids = self._update_ports_for_instance( [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] with excutils.save_and_reraise_exception(): [ 676.963033] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] self.force_reraise() [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] raise self.value [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] updated_port = self._update_port( [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] _ensure_no_port_binding_failure(port) [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] raise exception.PortBindingFailed(port_id=port['id']) [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] nova.exception.PortBindingFailed: Binding failed for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c, please check neutron logs for more information. [ 676.963403] env[61273]: ERROR nova.compute.manager [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] [ 676.963732] env[61273]: DEBUG nova.compute.utils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Binding failed for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 676.964098] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.932s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.965581] env[61273]: INFO nova.compute.claims [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.967297] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg c442fff49d49412d86aa811fe105f7aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 676.968579] env[61273]: DEBUG nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Build of instance 767d7956-954b-4be7-8cc6-45872ff4cfce was re-scheduled: Binding failed for port 7cf3ed88-f9cd-4e91-8812-0a8b7f548c5c, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 676.969029] env[61273]: DEBUG nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 676.969261] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Acquiring lock "refresh_cache-767d7956-954b-4be7-8cc6-45872ff4cfce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.969408] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Acquired lock "refresh_cache-767d7956-954b-4be7-8cc6-45872ff4cfce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.969566] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 676.969931] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 0b7731e966ee414eb6e7eddac0f2fcc5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 676.976786] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b7731e966ee414eb6e7eddac0f2fcc5 [ 676.999061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c442fff49d49412d86aa811fe105f7aa [ 677.161618] env[61273]: INFO nova.compute.manager [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] [instance: 30ed4438-4f74-4bc3-a6cc-a59420751940] Took 1.03 seconds to deallocate network for instance. [ 677.163673] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg c533c385918b4f0c890fb951f1f7b202 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 677.199002] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c533c385918b4f0c890fb951f1f7b202 [ 677.242965] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 677.245268] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg cf3e2801f7e544c7a480ade35ade4309 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 677.280104] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf3e2801f7e544c7a480ade35ade4309 [ 677.473137] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg 58fb8db6a1a44bd3aa2524b0340dc236 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 677.480922] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58fb8db6a1a44bd3aa2524b0340dc236 [ 677.490703] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 677.603451] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.604077] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg bf851643d08548d3afdad874d1f5f581 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 677.619254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf851643d08548d3afdad874d1f5f581 [ 677.668736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg b65bdcb1d9c54dc4b29ccc205746adde in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 677.705431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b65bdcb1d9c54dc4b29ccc205746adde [ 677.766316] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.106751] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Releasing lock "refresh_cache-767d7956-954b-4be7-8cc6-45872ff4cfce" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.106751] env[61273]: DEBUG nova.compute.manager [None 
req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 678.106959] env[61273]: DEBUG nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 678.107026] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 678.139920] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 678.140571] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 481bed1369c24728a3807e7e13054365 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 678.147588] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 481bed1369c24728a3807e7e13054365 [ 678.190248] env[61273]: INFO nova.scheduler.client.report [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Deleted allocations for instance 30ed4438-4f74-4bc3-a6cc-a59420751940 [ 678.198958] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Expecting reply to msg 608c42933871492ab70852051c03599f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 678.213664] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 608c42933871492ab70852051c03599f [ 678.300885] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91468aab-09f2-4c0e-be40-1f6b25073a70 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.309260] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b52090-15d0-4a66-9b2c-8b977c626bb9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.340019] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8e73d4-69ab-414f-b310-6571eb4aee4e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.347921] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-496dfb80-1f9c-4747-8d57-9619549637d7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.361245] env[61273]: DEBUG nova.compute.provider_tree [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.361782] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg e2b02df97fa640458393334f4a247ea2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 678.369017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2b02df97fa640458393334f4a247ea2 [ 678.642795] env[61273]: DEBUG nova.network.neutron [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.643315] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 1d9cf17ac2374e86a6bb6cb5a2380b49 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 678.653593] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d9cf17ac2374e86a6bb6cb5a2380b49 [ 678.704041] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9857f842-36a2-4960-9ce2-230cb2785401 tempest-ServersTestManualDisk-2053152108 tempest-ServersTestManualDisk-2053152108-project-member] Lock "30ed4438-4f74-4bc3-a6cc-a59420751940" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.884s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.704678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 50b7068c00bb4b72a861350427e37048 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 678.719670] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50b7068c00bb4b72a861350427e37048 [ 678.864730] env[61273]: DEBUG nova.scheduler.client.report [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 678.867036] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg 2a605605705447f7860f4a117dc89e5b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 678.880644] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a605605705447f7860f4a117dc89e5b [ 679.146043] env[61273]: INFO nova.compute.manager [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] [instance: 767d7956-954b-4be7-8cc6-45872ff4cfce] Took 1.04 seconds to deallocate network for instance. [ 679.148279] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 59bd51f1f5b84ba4bfb9613ee146b42d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 679.188724] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59bd51f1f5b84ba4bfb9613ee146b42d [ 679.206651] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 679.208498] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg b644f85711e7493fa72013f3767d0f60 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 679.247562] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b644f85711e7493fa72013f3767d0f60 [ 679.374150] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.374671] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 679.376314] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg c2d53bae46a34898a83d754ead275460 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 679.377393] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.970s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.378702] env[61273]: INFO nova.compute.claims [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.380394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 3e4d1ea8aacc472daab0ede04f91d685 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 679.417149] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2d53bae46a34898a83d754ead275460 [ 679.425017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e4d1ea8aacc472daab0ede04f91d685 [ 679.654570] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 25f90963df394d8a9254a43ecc85aef4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 679.718332] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25f90963df394d8a9254a43ecc85aef4 [ 679.739754] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.884151] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 9c468f27dbe64d4ab4d0d79a5552cc64 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 679.885871] env[61273]: DEBUG nova.compute.utils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 679.886438] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg df6236b3a78546ac9f72f78e2e7dceef in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 679.887180] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 679.887345] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 679.896580] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c468f27dbe64d4ab4d0d79a5552cc64 [ 679.904857] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df6236b3a78546ac9f72f78e2e7dceef [ 679.958085] env[61273]: DEBUG nova.policy [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eaba9dda37534605bc42ae92453f87c6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '094287f1b5004fa8a13d982851ec9347', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 680.181344] env[61273]: INFO nova.scheduler.client.report [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Deleted allocations for instance 767d7956-954b-4be7-8cc6-45872ff4cfce [ 680.188192] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Expecting reply to msg 3a754d3a46024c86b09e43a5e4a7f8fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 680.200836] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a754d3a46024c86b09e43a5e4a7f8fc [ 680.329395] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Successfully created port: 967f4a13-789b-471b-ab8f-72b7480a5afc {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.390665] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 680.392465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg 5a994b7619fa44d5a4caf3bcb3457e94 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 680.427138] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a994b7619fa44d5a4caf3bcb3457e94 [ 680.690274] env[61273]: DEBUG oslo_concurrency.lockutils [None req-117282fb-6d7f-43c4-ac08-bdd3abb18aff tempest-ServerActionsTestOtherB-1209735886 tempest-ServerActionsTestOtherB-1209735886-project-member] Lock "767d7956-954b-4be7-8cc6-45872ff4cfce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.563s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.690977] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 2d5755cab80b4acfa090406cd210a0a3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 680.704859] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d5755cab80b4acfa090406cd210a0a3 [ 680.845324] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6e8f9e-0a61-4e55-9c97-6d8e98fd5358 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.853431] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bc9b88-c870-45c9-9bb9-f12b91cccbcd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.886497] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af14fe6d-d596-420f-a5d5-beef45bf9805 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.895631] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee33d8c-8f2c-4f57-af71-bc7995601c97 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.903703] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg 1dff699595c1468d8fcc0d1c270e478d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 680.914723] env[61273]: DEBUG nova.compute.provider_tree [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.915244] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 096dfdb754cf4311bc68b3a462be79b4 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 680.924651] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 096dfdb754cf4311bc68b3a462be79b4 [ 680.944111] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dff699595c1468d8fcc0d1c270e478d [ 681.196453] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 681.198187] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 6c9b1c8441df47e7aa74563d4b8c39aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 681.251254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c9b1c8441df47e7aa74563d4b8c39aa [ 681.407269] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 681.418445] env[61273]: DEBUG nova.scheduler.client.report [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 681.421032] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg ff8b9aa3bab04a1d8540e5dd23be99b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 681.437430] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 681.437678] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 681.437854] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.438044] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 681.438187] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.438334] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 681.438537] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 681.438693] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 681.438855] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 681.439013] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 681.439186] env[61273]: DEBUG nova.virt.hardware [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 681.440113] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9aeb31-c5bd-4211-a5a2-456b0a78fe57 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.443126] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff8b9aa3bab04a1d8540e5dd23be99b1 [ 681.450236] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40251a0-07aa-44a0-b0fd-737e0caa331a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.740049] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.923787] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.546s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.924512] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 681.927326] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 57faa9f493454cfa973165a4737ec1e2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 681.928813] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.728s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.930828] env[61273]: INFO nova.compute.claims [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 681.933204] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg ffba6bad92c74ab8a0a7f0ba75e4a3e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 681.987734] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57faa9f493454cfa973165a4737ec1e2 [ 681.995768] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffba6bad92c74ab8a0a7f0ba75e4a3e3 [ 682.396345] env[61273]: DEBUG nova.compute.manager [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Received event network-changed-967f4a13-789b-471b-ab8f-72b7480a5afc {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 682.396544] env[61273]: DEBUG nova.compute.manager [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Refreshing instance network info cache due to event network-changed-967f4a13-789b-471b-ab8f-72b7480a5afc. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 682.396759] env[61273]: DEBUG oslo_concurrency.lockutils [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] Acquiring lock "refresh_cache-12c47e99-faf4-4083-a46f-4e33c451e980" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.396901] env[61273]: DEBUG oslo_concurrency.lockutils [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] Acquired lock "refresh_cache-12c47e99-faf4-4083-a46f-4e33c451e980" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.397149] env[61273]: DEBUG nova.network.neutron [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Refreshing network info cache for port 967f4a13-789b-471b-ab8f-72b7480a5afc {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 682.397564] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] Expecting reply to msg e79e706b60ec4afd872df4d89c244dfa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 682.404242] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e79e706b60ec4afd872df4d89c244dfa [ 682.440129] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg dbced7c01b7f4a17ad5fe053d2118c4c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 682.441942] env[61273]: DEBUG nova.compute.utils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 682.442499] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg bf8306cfeae84c68bf6cd724a78fcb6b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 682.443421] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 682.443575] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 682.451421] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbced7c01b7f4a17ad5fe053d2118c4c [ 682.459439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf8306cfeae84c68bf6cd724a78fcb6b [ 682.541144] env[61273]: DEBUG nova.policy [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '824e8c3c583b4bf39d5d386d7fe06ea6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f484798dc24b465ba441d235e2943348', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 682.591937] env[61273]: ERROR nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 967f4a13-789b-471b-ab8f-72b7480a5afc, please check neutron logs for more information. 
[ 682.591937] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 682.591937] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 682.591937] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 682.591937] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 682.591937] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 682.591937] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 682.591937] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 682.591937] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 682.591937] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 682.591937] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 682.591937] env[61273]: ERROR nova.compute.manager raise self.value [ 682.591937] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 682.591937] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 682.591937] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 682.591937] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 682.592519] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 682.592519] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 682.592519] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 967f4a13-789b-471b-ab8f-72b7480a5afc, please check neutron logs for more information. 
[ 682.592519] env[61273]: ERROR nova.compute.manager [ 682.592519] env[61273]: Traceback (most recent call last): [ 682.592519] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 682.592519] env[61273]: listener.cb(fileno) [ 682.592519] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 682.592519] env[61273]: result = function(*args, **kwargs) [ 682.592519] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 682.592519] env[61273]: return func(*args, **kwargs) [ 682.592519] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 682.592519] env[61273]: raise e [ 682.592519] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 682.592519] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 682.592519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 682.592519] env[61273]: created_port_ids = self._update_ports_for_instance( [ 682.592519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 682.592519] env[61273]: with excutils.save_and_reraise_exception(): [ 682.592519] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 682.592519] env[61273]: self.force_reraise() [ 682.592519] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 682.592519] env[61273]: raise self.value [ 682.592519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 682.592519] env[61273]: updated_port = self._update_port( [ 682.592519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 682.592519] env[61273]: _ensure_no_port_binding_failure(port) [ 682.592519] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 682.592519] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 682.593335] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 967f4a13-789b-471b-ab8f-72b7480a5afc, please check neutron logs for more information. [ 682.593335] env[61273]: Removing descriptor: 19 [ 682.593335] env[61273]: ERROR nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 967f4a13-789b-471b-ab8f-72b7480a5afc, please check neutron logs for more information. 
[ 682.593335] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Traceback (most recent call last): [ 682.593335] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 682.593335] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] yield resources [ 682.593335] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 682.593335] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self.driver.spawn(context, instance, image_meta, [ 682.593335] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 682.593335] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self._vmops.spawn(context, instance, image_meta, injected_files, [ 682.593335] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 682.593335] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] vm_ref = self.build_virtual_machine(instance, [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] vif_infos = vmwarevif.get_vif_info(self._session, [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] for vif in network_info: [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] return self._sync_wrapper(fn, *args, **kwargs) [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self.wait() [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self[:] = self._gt.wait() [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] return self._exit_event.wait() [ 682.593695] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 682.594027] env[61273]: ERROR 
nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] result = hub.switch() [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] return self.greenlet.switch() [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] result = function(*args, **kwargs) [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] return func(*args, **kwargs) [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] raise e [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] nwinfo = self.network_api.allocate_for_instance( [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 682.594027] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] created_port_ids = self._update_ports_for_instance( [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] with excutils.save_and_reraise_exception(): [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self.force_reraise() [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] raise self.value [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] updated_port = self._update_port( [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 682.594357] 
env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] _ensure_no_port_binding_failure(port) [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 682.594357] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] raise exception.PortBindingFailed(port_id=port['id']) [ 682.594665] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] nova.exception.PortBindingFailed: Binding failed for port 967f4a13-789b-471b-ab8f-72b7480a5afc, please check neutron logs for more information. [ 682.594665] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] [ 682.594665] env[61273]: INFO nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Terminating instance [ 682.594665] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Acquiring lock "refresh_cache-12c47e99-faf4-4083-a46f-4e33c451e980" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.919017] env[61273]: DEBUG nova.network.neutron [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.947793] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 682.949669] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 7713f58802bb42b0a930d1c1632fb2f0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 682.996589] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7713f58802bb42b0a930d1c1632fb2f0 [ 683.037045] env[61273]: DEBUG nova.network.neutron [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.037558] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] Expecting reply to msg 2ba8623d2c5342d980b27039a24a5e0f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 683.051014] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ba8623d2c5342d980b27039a24a5e0f [ 683.283735] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Successfully created port: d1633ad7-ab0c-43f3-bf09-b74926bf1ccf {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 683.332204] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70e99a2-f371-49d9-8cf8-99a7e76f833a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.338764] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c82f50-18b3-4c7f-a873-d42d57d5c7a7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.369832] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bdf792-6276-4df7-99cd-6e25dd4b7733 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.377466] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd39e44-e7d8-4132-9d49-ae77076d57a2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.399487] env[61273]: DEBUG nova.compute.provider_tree [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.399793] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg d0cdfb012fba4619ae5002f665ae9539 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 683.407673] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
d0cdfb012fba4619ae5002f665ae9539 [ 683.457136] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg a5669b1e066a4dd38e26d7a2816b304a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 683.509557] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5669b1e066a4dd38e26d7a2816b304a [ 683.539681] env[61273]: DEBUG oslo_concurrency.lockutils [req-13f49180-b086-42be-afd0-4bbfd2a0bf1f req-921dcd9d-df4d-4413-ad25-81d0479b7d72 service nova] Releasing lock "refresh_cache-12c47e99-faf4-4083-a46f-4e33c451e980" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.540089] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Acquired lock "refresh_cache-12c47e99-faf4-4083-a46f-4e33c451e980" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.540288] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 683.540716] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg edc612dc8d364ee59694bdd8187e7886 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 683.547612] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edc612dc8d364ee59694bdd8187e7886 [ 683.902245] env[61273]: DEBUG nova.scheduler.client.report [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.904685] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 1bd12e22679d4105bcab402e1ad568d8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 683.922713] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bd12e22679d4105bcab402e1ad568d8 [ 683.961311] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 683.989014] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 683.989276] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 683.989425] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.989613] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 683.989762] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 683.989903] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 683.990101] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 683.990278] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 683.990422] env[61273]: DEBUG nova.virt.hardware [None 
req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 683.990574] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 683.991868] env[61273]: DEBUG nova.virt.hardware [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 683.993125] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9438c2cf-b503-49cf-9792-b18bf43eccd5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.001668] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d38f94c-b4fe-4bc2-ba21-ea47274ab109 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.085361] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 684.094132] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Acquiring lock "e62c0b97-cfa7-4acf-bdc5-93d6996c7806" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.094403] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Lock "e62c0b97-cfa7-4acf-bdc5-93d6996c7806" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.291965] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.292484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg dbfa17ac34bb4512bad6164020a790cf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 684.300637] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbfa17ac34bb4512bad6164020a790cf [ 684.416053] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.416053] env[61273]: DEBUG nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 684.416053] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 92fa368bdf294f49810664036111000b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 684.416053] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.786s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.416053] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg f0c9ff79210c4fdf965fb66cd386d795 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 684.431173] env[61273]: DEBUG nova.compute.manager [req-b3451f15-98c0-481f-a9a5-58c5b8b73a42 req-9071ed3f-b841-48b4-83d2-ad87786b4be8 service nova] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Received event network-vif-deleted-967f4a13-789b-471b-ab8f-72b7480a5afc {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 684.455258] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92fa368bdf294f49810664036111000b [ 684.455672] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0c9ff79210c4fdf965fb66cd386d795 [ 684.796384] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Releasing lock "refresh_cache-12c47e99-faf4-4083-a46f-4e33c451e980" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.796866] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 684.797066] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 684.797367] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31d5f969-966e-4f36-a2f6-ac33147f197f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.807364] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b137e8-6e00-4a68-a96b-25de134c9e67 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.838575] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 12c47e99-faf4-4083-a46f-4e33c451e980 could not be found. [ 684.838841] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 684.839022] env[61273]: INFO nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Took 0.04 seconds to destroy the instance on the hypervisor. [ 684.839273] env[61273]: DEBUG oslo.service.loopingcall [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 684.839513] env[61273]: DEBUG nova.compute.manager [-] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 684.839581] env[61273]: DEBUG nova.network.neutron [-] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 684.866883] env[61273]: DEBUG nova.network.neutron [-] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 684.867432] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f2729e6c08ca4a57b453d9d638373aee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 684.875796] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2729e6c08ca4a57b453d9d638373aee [ 684.918890] env[61273]: DEBUG nova.compute.utils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 684.919636] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg a1ecdb42114348ac85c3bcc0718b4afc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 684.935126] env[61273]: DEBUG nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 684.935126] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 684.937805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1ecdb42114348ac85c3bcc0718b4afc [ 684.991115] env[61273]: DEBUG nova.policy [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c24c9f3c3594981b6c9970cd8c8f877', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9e32c9f8505420595cdd8f18285a33f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 685.037991] env[61273]: DEBUG nova.compute.manager [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Received event network-changed-d1633ad7-ab0c-43f3-bf09-b74926bf1ccf {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 685.038187] env[61273]: DEBUG nova.compute.manager [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Refreshing instance network info cache due to event network-changed-d1633ad7-ab0c-43f3-bf09-b74926bf1ccf. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 685.038405] env[61273]: DEBUG oslo_concurrency.lockutils [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] Acquiring lock "refresh_cache-5ea287cd-ba85-446d-85d0-5a050fe49f17" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.038550] env[61273]: DEBUG oslo_concurrency.lockutils [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] Acquired lock "refresh_cache-5ea287cd-ba85-446d-85d0-5a050fe49f17" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.038711] env[61273]: DEBUG nova.network.neutron [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Refreshing network info cache for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 685.039141] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] Expecting reply to msg 2238f0cd7a9349f899b7126a3cdf8509 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 685.046625] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2238f0cd7a9349f899b7126a3cdf8509 [ 685.177683] env[61273]: ERROR nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf, please check neutron logs for more information. 
[ 685.177683] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 685.177683] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 685.177683] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 685.177683] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.177683] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 685.177683] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.177683] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 685.177683] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.177683] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 685.177683] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.177683] env[61273]: ERROR nova.compute.manager raise self.value [ 685.177683] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.177683] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 685.177683] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.177683] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 685.178203] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.178203] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 685.178203] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf, please check neutron logs for more information. 
[ 685.178203] env[61273]: ERROR nova.compute.manager [ 685.178203] env[61273]: Traceback (most recent call last): [ 685.178203] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 685.178203] env[61273]: listener.cb(fileno) [ 685.178203] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 685.178203] env[61273]: result = function(*args, **kwargs) [ 685.178203] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 685.178203] env[61273]: return func(*args, **kwargs) [ 685.178203] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 685.178203] env[61273]: raise e [ 685.178203] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 685.178203] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 685.178203] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.178203] env[61273]: created_port_ids = self._update_ports_for_instance( [ 685.178203] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.178203] env[61273]: with excutils.save_and_reraise_exception(): [ 685.178203] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.178203] env[61273]: self.force_reraise() [ 685.178203] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.178203] env[61273]: raise self.value [ 685.178203] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.178203] env[61273]: updated_port = self._update_port( [ 685.178203] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.178203] env[61273]: _ensure_no_port_binding_failure(port) [ 685.178203] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.178203] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 685.178998] env[61273]: nova.exception.PortBindingFailed: Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf, please check neutron logs for more information. [ 685.178998] env[61273]: Removing descriptor: 15 [ 685.178998] env[61273]: ERROR nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf, please check neutron logs for more information. 
[ 685.178998] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Traceback (most recent call last): [ 685.178998] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 685.178998] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] yield resources [ 685.178998] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 685.178998] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self.driver.spawn(context, instance, image_meta, [ 685.178998] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 685.178998] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self._vmops.spawn(context, instance, image_meta, injected_files, [ 685.178998] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 685.178998] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] vm_ref = self.build_virtual_machine(instance, [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] vif_infos = vmwarevif.get_vif_info(self._session, [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] for vif in network_info: [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] return self._sync_wrapper(fn, *args, **kwargs) [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self.wait() [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self[:] = self._gt.wait() [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] return self._exit_event.wait() [ 685.179417] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 685.179783] env[61273]: ERROR 
nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] result = hub.switch() [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] return self.greenlet.switch() [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] result = function(*args, **kwargs) [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] return func(*args, **kwargs) [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] raise e [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] nwinfo = self.network_api.allocate_for_instance( [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.179783] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] created_port_ids = self._update_ports_for_instance( [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] with excutils.save_and_reraise_exception(): [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self.force_reraise() [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] raise self.value [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] updated_port = self._update_port( [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.180159] 
env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] _ensure_no_port_binding_failure(port) [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.180159] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] raise exception.PortBindingFailed(port_id=port['id']) [ 685.180491] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] nova.exception.PortBindingFailed: Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf, please check neutron logs for more information. [ 685.180491] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] [ 685.180491] env[61273]: INFO nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Terminating instance [ 685.180961] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Acquiring lock "refresh_cache-5ea287cd-ba85-446d-85d0-5a050fe49f17" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.352791] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3648b7c-aa0d-4eaf-9745-fdee9d99be5a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.360621] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954d75f3-aa57-4b0d-90c3-4944b723533f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.389138] env[61273]: DEBUG nova.network.neutron [-] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.389685] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 059af285b92a420ba0dd2394f6e324c8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 685.391996] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b220e0-14dc-477d-850f-b2f5dcbc4139 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.406977] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 059af285b92a420ba0dd2394f6e324c8 [ 685.408586] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a44141-54ad-429b-994a-fd63df60bf05 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.424331] env[61273]: DEBUG nova.compute.provider_tree [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.425042] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg c3844ed954474e618fa2442f6b1c7a4b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 685.433825] env[61273]: DEBUG nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 685.435507] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 24197e5f3856428ca4065f6ea6bc1f7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 685.443780] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3844ed954474e618fa2442f6b1c7a4b [ 685.470800] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24197e5f3856428ca4065f6ea6bc1f7a [ 685.517249] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Successfully created port: 230ecbbd-5bca-48b4-9f6f-7b581a49f268 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 685.557011] env[61273]: DEBUG nova.network.neutron [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 685.636673] env[61273]: DEBUG nova.network.neutron [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.637330] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] Expecting reply to msg a18ab81f747547069d7536a593fd1e9b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 685.649078] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a18ab81f747547069d7536a593fd1e9b [ 685.904793] env[61273]: INFO nova.compute.manager [-] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Took 1.07 seconds to deallocate network for instance. 
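The PortBindingFailed tracebacks above all terminate in _ensure_no_port_binding_failure() at nova/network/neutron.py line 294. A minimal, self-contained sketch of that check follows, reconstructed only from the frames shown in this log; the log confirms just the function name and that it raises PortBindingFailed(port_id=port['id']), so the stand-in exception class and the test on the port's binding:vif_type attribute equalling 'binding_failed' are assumptions, not details taken from this output.

    # Sketch (assumption-based) of the check behind the repeated
    # "Binding failed for port ..." errors in the log above.

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed (assumed message format).
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed value reported by Neutron

    def _ensure_no_port_binding_failure(port):
        # Neutron returns the port as a dict; a failed binding is assumed to be
        # surfaced via the binding:vif_type attribute rather than an HTTP error,
        # which is why Nova has to check the returned port explicitly.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Usage example with a hypothetical port dict shaped like the one behind
    # "Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf":
    try:
        _ensure_no_port_binding_failure(
            {'id': 'd1633ad7-ab0c-43f3-bf09-b74926bf1ccf',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)

When this exception escapes _update_ports_for_instance(), the compute manager aborts the claim, deallocates networking, and re-schedules the build, which is the sequence the surrounding entries record for instances 12c47e99-faf4-4083-a46f-4e33c451e980 and 5ea287cd-ba85-446d-85d0-5a050fe49f17.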
[ 685.907196] env[61273]: DEBUG nova.compute.claims [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 685.907369] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.927876] env[61273]: DEBUG nova.scheduler.client.report [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 685.930374] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 4322aa01bce144ea972efca3a673a352 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 685.944525] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4322aa01bce144ea972efca3a673a352 [ 685.947414] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 7f658b72c93c4bdea308a852c642dd7c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 685.997412] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f658b72c93c4bdea308a852c642dd7c [ 686.141300] env[61273]: DEBUG oslo_concurrency.lockutils [req-25e9a605-fa39-4b5e-961f-2f6292e729c5 req-5cc734de-7bc5-4033-9e4c-4be0d459ba05 service nova] Releasing lock "refresh_cache-5ea287cd-ba85-446d-85d0-5a050fe49f17" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.141732] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Acquired lock "refresh_cache-5ea287cd-ba85-446d-85d0-5a050fe49f17" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.141920] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 686.142339] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg ba235e9486f24e22bd70a4d8620d8948 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 686.149453] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba235e9486f24e22bd70a4d8620d8948 [ 686.433374] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.019s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.434024] env[61273]: ERROR nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7dd0ca68-2bda-4db1-8681-659736235fa8, please check neutron logs for more information. [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Traceback (most recent call last): [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self.driver.spawn(context, instance, image_meta, [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] vm_ref = self.build_virtual_machine(instance, [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] vif_infos = vmwarevif.get_vif_info(self._session, [ 686.434024] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] for vif in network_info: [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] return self._sync_wrapper(fn, *args, **kwargs) [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 
686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self.wait() [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self[:] = self._gt.wait() [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] return self._exit_event.wait() [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] result = hub.switch() [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 686.434362] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] return self.greenlet.switch() [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] result = function(*args, **kwargs) [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] return func(*args, **kwargs) [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] raise e [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] nwinfo = self.network_api.allocate_for_instance( [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] created_port_ids = self._update_ports_for_instance( [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] with excutils.save_and_reraise_exception(): [ 686.434685] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] self.force_reraise() [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] raise self.value [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] updated_port = self._update_port( [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] _ensure_no_port_binding_failure(port) [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] raise exception.PortBindingFailed(port_id=port['id']) [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] nova.exception.PortBindingFailed: Binding failed for port 7dd0ca68-2bda-4db1-8681-659736235fa8, please check neutron logs for more information. [ 686.435014] env[61273]: ERROR nova.compute.manager [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] [ 686.435342] env[61273]: DEBUG nova.compute.utils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Binding failed for port 7dd0ca68-2bda-4db1-8681-659736235fa8, please check neutron logs for more information. 
{{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 686.435886] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.543s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.437295] env[61273]: INFO nova.compute.claims [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.438826] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 08dd918ef4da46fe80c4803cfa0efbc9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 686.440970] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Build of instance b41c6d21-5e7f-427f-95ce-830fe0da8bc6 was re-scheduled: Binding failed for port 7dd0ca68-2bda-4db1-8681-659736235fa8, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 686.441534] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 686.441848] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Acquiring lock "refresh_cache-b41c6d21-5e7f-427f-95ce-830fe0da8bc6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.442046] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Acquired lock "refresh_cache-b41c6d21-5e7f-427f-95ce-830fe0da8bc6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.442256] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 686.442683] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg af68fe6c4d124d0f83e23a0e6c12ec10 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 686.449008] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
af68fe6c4d124d0f83e23a0e6c12ec10 [ 686.450276] env[61273]: DEBUG nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 686.474876] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=<?>,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-08-02T13:31:32Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 686.475200] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 686.475414] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 686.475686] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 686.475890] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 686.476131] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 686.476392] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 686.476601] env[61273]: DEBUG 
nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 686.476842] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 686.477033] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 686.477259] env[61273]: DEBUG nova.virt.hardware [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 686.478151] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a431910-adc0-4137-a7a9-2e2bf1bc605d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.481479] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08dd918ef4da46fe80c4803cfa0efbc9 [ 686.488270] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e52d6d-ec9c-472a-ae5c-8c71a4ac7358 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.663246] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 686.761235] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.761774] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg d3adec6217e54368aa9a88efc44e3bb7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 686.776055] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3adec6217e54368aa9a88efc44e3bb7 [ 686.943406] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 17b3512161e04239b97c9a13882333f7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 686.956372] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17b3512161e04239b97c9a13882333f7 [ 686.963357] env[61273]: DEBUG nova.compute.manager [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] [instance: 297a5546-6159-462c-a436-032d94855c00] Received event network-changed-230ecbbd-5bca-48b4-9f6f-7b581a49f268 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 686.963578] env[61273]: DEBUG nova.compute.manager [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] [instance: 297a5546-6159-462c-a436-032d94855c00] Refreshing instance network info cache due to event network-changed-230ecbbd-5bca-48b4-9f6f-7b581a49f268. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 686.963739] env[61273]: DEBUG oslo_concurrency.lockutils [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] Acquiring lock "refresh_cache-297a5546-6159-462c-a436-032d94855c00" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.964142] env[61273]: DEBUG oslo_concurrency.lockutils [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] Acquired lock "refresh_cache-297a5546-6159-462c-a436-032d94855c00" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.964142] env[61273]: DEBUG nova.network.neutron [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] [instance: 297a5546-6159-462c-a436-032d94855c00] Refreshing network info cache for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 686.964456] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] Expecting reply to msg d921daaceb69454cb6cdd97981310522 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 686.971485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d921daaceb69454cb6cdd97981310522 [ 686.977087] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 687.050359] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.050998] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg f5e68643562044248d8cdd48d4df6787 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 687.061135] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5e68643562044248d8cdd48d4df6787 [ 687.066078] env[61273]: DEBUG nova.compute.manager [req-1b019044-e52b-4018-ab5c-26c009c48d9a req-7cdf2687-23f4-451e-b907-483b35e8f31b service nova] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Received event network-vif-deleted-d1633ad7-ab0c-43f3-bf09-b74926bf1ccf {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 687.215465] env[61273]: ERROR nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268, please check neutron logs for more information. 
[ 687.215465] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 687.215465] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.215465] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 687.215465] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 687.215465] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 687.215465] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 687.215465] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 687.215465] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.215465] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 687.215465] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.215465] env[61273]: ERROR nova.compute.manager raise self.value [ 687.215465] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 687.215465] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 687.215465] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.215465] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 687.215998] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.215998] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 687.215998] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268, please check neutron logs for more information. 
[ 687.215998] env[61273]: ERROR nova.compute.manager [ 687.215998] env[61273]: Traceback (most recent call last): [ 687.215998] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 687.215998] env[61273]: listener.cb(fileno) [ 687.215998] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 687.215998] env[61273]: result = function(*args, **kwargs) [ 687.215998] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 687.215998] env[61273]: return func(*args, **kwargs) [ 687.215998] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 687.215998] env[61273]: raise e [ 687.215998] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.215998] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 687.215998] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 687.215998] env[61273]: created_port_ids = self._update_ports_for_instance( [ 687.215998] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 687.215998] env[61273]: with excutils.save_and_reraise_exception(): [ 687.215998] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.215998] env[61273]: self.force_reraise() [ 687.215998] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.215998] env[61273]: raise self.value [ 687.215998] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 687.215998] env[61273]: updated_port = self._update_port( [ 687.215998] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.215998] env[61273]: _ensure_no_port_binding_failure(port) [ 687.215998] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.215998] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 687.216904] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268, please check neutron logs for more information. [ 687.216904] env[61273]: Removing descriptor: 19 [ 687.216904] env[61273]: ERROR nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268, please check neutron logs for more information. 
[ 687.216904] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] Traceback (most recent call last): [ 687.216904] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 687.216904] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] yield resources [ 687.216904] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 687.216904] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self.driver.spawn(context, instance, image_meta, [ 687.216904] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 687.216904] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self._vmops.spawn(context, instance, image_meta, injected_files, [ 687.216904] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 687.216904] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] vm_ref = self.build_virtual_machine(instance, [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] vif_infos = vmwarevif.get_vif_info(self._session, [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] for vif in network_info: [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] return self._sync_wrapper(fn, *args, **kwargs) [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self.wait() [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self[:] = self._gt.wait() [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] return self._exit_event.wait() [ 687.217272] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 687.217659] env[61273]: ERROR 
nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] result = hub.switch() [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] return self.greenlet.switch() [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] result = function(*args, **kwargs) [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] return func(*args, **kwargs) [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] raise e [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] nwinfo = self.network_api.allocate_for_instance( [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 687.217659] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] created_port_ids = self._update_ports_for_instance( [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] with excutils.save_and_reraise_exception(): [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self.force_reraise() [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] raise self.value [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] updated_port = self._update_port( [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 687.218228] 
env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] _ensure_no_port_binding_failure(port) [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 687.218228] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] raise exception.PortBindingFailed(port_id=port['id']) [ 687.218587] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] nova.exception.PortBindingFailed: Binding failed for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268, please check neutron logs for more information. [ 687.218587] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] [ 687.218587] env[61273]: INFO nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Terminating instance [ 687.218754] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Acquiring lock "refresh_cache-297a5546-6159-462c-a436-032d94855c00" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.270510] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Releasing lock "refresh_cache-5ea287cd-ba85-446d-85d0-5a050fe49f17" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.270957] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 687.271155] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 687.271464] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-207d0bd4-1690-496c-bddf-e34a0f1fd358 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.280583] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1f3bf2-9241-4d07-8980-eb3468ca4496 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.302221] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5ea287cd-ba85-446d-85d0-5a050fe49f17 could not be found. 
[ 687.302455] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 687.302642] env[61273]: INFO nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Took 0.03 seconds to destroy the instance on the hypervisor. [ 687.302902] env[61273]: DEBUG oslo.service.loopingcall [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 687.303133] env[61273]: DEBUG nova.compute.manager [-] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 687.303227] env[61273]: DEBUG nova.network.neutron [-] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 687.330653] env[61273]: DEBUG nova.network.neutron [-] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 687.331196] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6713cf26f095445fb0ede6e929c033ef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 687.340134] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6713cf26f095445fb0ede6e929c033ef [ 687.496567] env[61273]: DEBUG nova.network.neutron [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] [instance: 297a5546-6159-462c-a436-032d94855c00] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 687.552847] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Releasing lock "refresh_cache-b41c6d21-5e7f-427f-95ce-830fe0da8bc6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.553003] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 687.553166] env[61273]: DEBUG nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 687.553360] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 687.575054] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 687.575632] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg c75a0409cb8f4aebb54fedd54e4c069f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 687.582215] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c75a0409cb8f4aebb54fedd54e4c069f [ 687.651243] env[61273]: DEBUG nova.network.neutron [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] [instance: 297a5546-6159-462c-a436-032d94855c00] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.651803] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] Expecting reply to msg 2c2102c1651e4732b544a4969a7f5ca5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 687.660659] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c2102c1651e4732b544a4969a7f5ca5 [ 687.793877] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee20bbe-b72c-456c-81b3-4baabc8dd94c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.803074] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1c8dd4-30fc-43a9-be6c-823e6eff6f1e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.837184] env[61273]: DEBUG nova.network.neutron [-] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.838152] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 24d0a252de0c4ec5a3628ac522434a30 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 687.840893] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c19d3ef-c3ec-44aa-bd4f-d97a91ab61a8 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.848968] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59203034-6a72-4336-a314-59dee8013f61 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.853585] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24d0a252de0c4ec5a3628ac522434a30 [ 687.865158] env[61273]: DEBUG nova.compute.provider_tree [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.865875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg d4bb70dbc9054f4b9372f664b0675338 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 687.875223] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4bb70dbc9054f4b9372f664b0675338 [ 688.078209] env[61273]: DEBUG nova.network.neutron [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.079182] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg d535e38762c5476faee5841b3d5099be in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 688.088431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d535e38762c5476faee5841b3d5099be [ 688.153741] env[61273]: DEBUG oslo_concurrency.lockutils [req-01271fa8-a5fd-4f1a-8a79-cf8cf5a101de req-016dc8f4-2756-4e08-b946-54bf098a21a6 service nova] Releasing lock "refresh_cache-297a5546-6159-462c-a436-032d94855c00" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.154192] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Acquired lock "refresh_cache-297a5546-6159-462c-a436-032d94855c00" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.154374] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 688.154800] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg c13dde47a24d4f7ebd1b4f0e1a013571 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 
688.165348] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c13dde47a24d4f7ebd1b4f0e1a013571 [ 688.340492] env[61273]: INFO nova.compute.manager [-] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Took 1.04 seconds to deallocate network for instance. [ 688.342937] env[61273]: DEBUG nova.compute.claims [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 688.343118] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.369264] env[61273]: DEBUG nova.scheduler.client.report [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 688.372020] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg eaa51a1add3d423bac7466df5a91f212 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 688.385079] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaa51a1add3d423bac7466df5a91f212 [ 688.588233] env[61273]: INFO nova.compute.manager [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] [instance: b41c6d21-5e7f-427f-95ce-830fe0da8bc6] Took 1.03 seconds to deallocate network for instance. 
[ 688.588233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 61d52e5e237f4520bcb12995da0fc703 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 688.588233] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "7bfdc548-4f10-4525-9ea1-3781f90ca81d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.588233] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "7bfdc548-4f10-4525-9ea1-3781f90ca81d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.615539] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61d52e5e237f4520bcb12995da0fc703 [ 688.672806] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 688.775103] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.775649] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 570abd12a0dd4894af790b27d5d6da66 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 688.783854] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 570abd12a0dd4894af790b27d5d6da66 [ 688.874818] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.875352] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 688.877147] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg fd441df7a1474c0d9e520331df907deb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 688.878165] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.170s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.878384] env[61273]: DEBUG nova.objects.instance [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lazy-loading 'resources' on Instance uuid 109fc11e-d640-4617-99a3-0defe0a5aa6c {{(pid=61273) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 688.878690] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg af3f50a19b414ef592cd3e789ea0106a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 688.894008] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af3f50a19b414ef592cd3e789ea0106a [ 688.911780] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd441df7a1474c0d9e520331df907deb [ 688.998083] env[61273]: DEBUG nova.compute.manager [req-8e96b569-e2ff-40ef-8f31-25244dd3f951 req-b17f0530-3b76-4f23-b77a-810100ca449a service nova] [instance: 297a5546-6159-462c-a436-032d94855c00] Received event network-vif-deleted-230ecbbd-5bca-48b4-9f6f-7b581a49f268 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 689.087575] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg 44da0567f2b54c05a64f712ff6ab0939 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 689.133398] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44da0567f2b54c05a64f712ff6ab0939 [ 689.278104] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Releasing lock "refresh_cache-297a5546-6159-462c-a436-032d94855c00" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.278541] env[61273]: DEBUG nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 689.278737] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 689.279045] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d59fc5b-2dfd-48c1-8a7e-f7f7754313a8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.288441] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cde0142-9435-45e1-bbf1-c9b89ebffb2b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.312140] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 297a5546-6159-462c-a436-032d94855c00 could not be found. [ 689.312384] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 689.312572] env[61273]: INFO nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Took 0.03 seconds to destroy the instance on the hypervisor. [ 689.312834] env[61273]: DEBUG oslo.service.loopingcall [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 689.313069] env[61273]: DEBUG nova.compute.manager [-] [instance: 297a5546-6159-462c-a436-032d94855c00] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 689.313163] env[61273]: DEBUG nova.network.neutron [-] [instance: 297a5546-6159-462c-a436-032d94855c00] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 689.345859] env[61273]: DEBUG nova.network.neutron [-] [instance: 297a5546-6159-462c-a436-032d94855c00] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 689.346377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a3706e8c626242358b73fb1895f60a9a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 689.353737] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3706e8c626242358b73fb1895f60a9a [ 689.381745] env[61273]: DEBUG nova.compute.utils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 689.382545] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 5fe8321a3f3c4c3ba4ef0ded8aeb872f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 689.383421] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 689.392020] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 689.398975] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fe8321a3f3c4c3ba4ef0ded8aeb872f [ 689.468286] env[61273]: DEBUG nova.policy [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6116fae5f64e41e09dd7d6f6fcab88dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67f289a0c54e46e18b97b47c11a9b1f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 689.609335] env[61273]: INFO nova.scheduler.client.report [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Deleted allocations for instance b41c6d21-5e7f-427f-95ce-830fe0da8bc6 [ 689.620343] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Expecting reply to msg aa4a68a690e940e981e7b438d1fc2729 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 689.639413] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa4a68a690e940e981e7b438d1fc2729 [ 689.771486] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79f4757-1ddf-4225-878a-8f4cfc7bd223 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.781378] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a61aa5-3168-4d3c-b685-fcf463142725 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.815144] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194cd534-6a6f-4928-afb7-e1e377d1272a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.824047] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0e1e09-d361-4219-9798-bce3c86a2624 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.840043] env[61273]: DEBUG nova.compute.provider_tree [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.840646] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 7690ce33954a4fdd86f848fd1a833160 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 689.848103] env[61273]: DEBUG nova.network.neutron [-] [instance: 297a5546-6159-462c-a436-032d94855c00] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.848549] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5d5cbcd122fc49128a37eeb78cd6482f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 689.856849] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d5cbcd122fc49128a37eeb78cd6482f [ 689.857847] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7690ce33954a4fdd86f848fd1a833160 [ 689.869929] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Successfully created port: ea7c2700-8347-4baa-b73d-d330af13f7ab {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.886086] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 689.887746] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 8bd8906c3c3243e0ac1262d6c0bb2a51 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 689.930336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bd8906c3c3243e0ac1262d6c0bb2a51 [ 690.121981] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a0794c-8d68-4920-918c-537e6e9bbf23 tempest-TenantUsagesTestJSON-1072048346 tempest-TenantUsagesTestJSON-1072048346-project-member] Lock "b41c6d21-5e7f-427f-95ce-830fe0da8bc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.610s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.122601] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg eb4600d82bfb4bfcab08a7a00e3a4b7f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 690.137293] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb4600d82bfb4bfcab08a7a00e3a4b7f [ 690.343048] env[61273]: DEBUG nova.scheduler.client.report [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 690.345528] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 7c1a8954e21743869f3a68ebe24733a3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 690.350884] env[61273]: INFO nova.compute.manager [-] [instance: 297a5546-6159-462c-a436-032d94855c00] Took 1.04 seconds to deallocate network for instance. 
[ 690.352582] env[61273]: DEBUG nova.compute.claims [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 690.352778] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.358837] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c1a8954e21743869f3a68ebe24733a3 [ 690.392283] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 7fb4d6458f0047508da5dc3c1a0f893b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 690.425658] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fb4d6458f0047508da5dc3c1a0f893b [ 690.624712] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 690.626479] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg c70e5d8913f44187aea954a6abe47de7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 690.659268] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c70e5d8913f44187aea954a6abe47de7 [ 690.847989] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.970s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.850331] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.954s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.851860] env[61273]: INFO nova.compute.claims [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.853417] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to 
msg b5908e3bf331466e91344daabede2296 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 690.873368] env[61273]: INFO nova.scheduler.client.report [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Deleted allocations for instance 109fc11e-d640-4617-99a3-0defe0a5aa6c [ 690.876957] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg 3a6906d868aa44ed9886a506da2323df in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 690.895735] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 690.919011] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5908e3bf331466e91344daabede2296 [ 690.922531] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:34:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='fe21652d-3c21-48d1-9a85-72f8506b2168',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1286609570',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 690.922800] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 690.922962] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.923144] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 690.923287] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 690.923430] env[61273]: DEBUG 
nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 690.923639] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 690.923797] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 690.923959] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 690.924142] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 690.924317] env[61273]: DEBUG nova.virt.hardware [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 690.925555] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f4a48b-ac8a-48ec-a82b-3512b0ff7a90 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.935826] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea2466a-a321-479a-953e-ca9964c5525f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.945814] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a6906d868aa44ed9886a506da2323df [ 691.029140] env[61273]: ERROR nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ea7c2700-8347-4baa-b73d-d330af13f7ab, please check neutron logs for more information. 
[ 691.029140] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 691.029140] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.029140] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 691.029140] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 691.029140] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 691.029140] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 691.029140] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 691.029140] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.029140] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 691.029140] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.029140] env[61273]: ERROR nova.compute.manager raise self.value [ 691.029140] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 691.029140] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 691.029140] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.029140] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 691.029694] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.029694] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 691.029694] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ea7c2700-8347-4baa-b73d-d330af13f7ab, please check neutron logs for more information. 
[ 691.029694] env[61273]: ERROR nova.compute.manager [ 691.029694] env[61273]: Traceback (most recent call last): [ 691.029694] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 691.029694] env[61273]: listener.cb(fileno) [ 691.029694] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.029694] env[61273]: result = function(*args, **kwargs) [ 691.029694] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 691.029694] env[61273]: return func(*args, **kwargs) [ 691.029694] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 691.029694] env[61273]: raise e [ 691.029694] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.029694] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 691.029694] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 691.029694] env[61273]: created_port_ids = self._update_ports_for_instance( [ 691.029694] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 691.029694] env[61273]: with excutils.save_and_reraise_exception(): [ 691.029694] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.029694] env[61273]: self.force_reraise() [ 691.029694] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.029694] env[61273]: raise self.value [ 691.029694] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 691.029694] env[61273]: updated_port = self._update_port( [ 691.029694] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.029694] env[61273]: _ensure_no_port_binding_failure(port) [ 691.029694] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.029694] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 691.030545] env[61273]: nova.exception.PortBindingFailed: Binding failed for port ea7c2700-8347-4baa-b73d-d330af13f7ab, please check neutron logs for more information. [ 691.030545] env[61273]: Removing descriptor: 19 [ 691.030545] env[61273]: ERROR nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ea7c2700-8347-4baa-b73d-d330af13f7ab, please check neutron logs for more information. 
[ 691.030545] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Traceback (most recent call last): [ 691.030545] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 691.030545] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] yield resources [ 691.030545] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 691.030545] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self.driver.spawn(context, instance, image_meta, [ 691.030545] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 691.030545] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 691.030545] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 691.030545] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] vm_ref = self.build_virtual_machine(instance, [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] vif_infos = vmwarevif.get_vif_info(self._session, [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] for vif in network_info: [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] return self._sync_wrapper(fn, *args, **kwargs) [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self.wait() [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self[:] = self._gt.wait() [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] return self._exit_event.wait() [ 691.030937] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 691.031338] env[61273]: ERROR 
nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] result = hub.switch() [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] return self.greenlet.switch() [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] result = function(*args, **kwargs) [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] return func(*args, **kwargs) [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] raise e [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] nwinfo = self.network_api.allocate_for_instance( [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 691.031338] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] created_port_ids = self._update_ports_for_instance( [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] with excutils.save_and_reraise_exception(): [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self.force_reraise() [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] raise self.value [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] updated_port = self._update_port( [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.031746] 
env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] _ensure_no_port_binding_failure(port) [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 691.031746] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] raise exception.PortBindingFailed(port_id=port['id']) [ 691.032128] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] nova.exception.PortBindingFailed: Binding failed for port ea7c2700-8347-4baa-b73d-d330af13f7ab, please check neutron logs for more information. [ 691.032128] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] [ 691.032128] env[61273]: INFO nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Terminating instance [ 691.032882] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "refresh_cache-2b87dfbe-2b94-4787-a795-94f8b63f651c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.032882] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquired lock "refresh_cache-2b87dfbe-2b94-4787-a795-94f8b63f651c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.032882] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 691.033323] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 08b6dbfc615c45a28f916b94cac9338d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 691.044257] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08b6dbfc615c45a28f916b94cac9338d [ 691.145181] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.163097] env[61273]: DEBUG nova.compute.manager [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 service nova] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Received event network-changed-ea7c2700-8347-4baa-b73d-d330af13f7ab {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 691.163342] env[61273]: DEBUG nova.compute.manager [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 
service nova] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Refreshing instance network info cache due to event network-changed-ea7c2700-8347-4baa-b73d-d330af13f7ab. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 691.163544] env[61273]: DEBUG oslo_concurrency.lockutils [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 service nova] Acquiring lock "refresh_cache-2b87dfbe-2b94-4787-a795-94f8b63f651c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.357411] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg e0af3aff5d704e38bbe80cf6b5ba2d5d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 691.365911] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0af3aff5d704e38bbe80cf6b5ba2d5d [ 691.382531] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Lock "109fc11e-d640-4617-99a3-0defe0a5aa6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.418s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.382878] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d1819932-15b9-4a4d-a680-d9adfa2a43ab tempest-ServersAaction247Test-1023329623 tempest-ServersAaction247Test-1023329623-project-member] Expecting reply to msg b9a99d24aefe4d84adbcc02e391a734e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 691.399286] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9a99d24aefe4d84adbcc02e391a734e [ 691.558026] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 691.609874] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.610435] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 99bd9de674e441339a7fce73c2d00e89 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 691.618578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99bd9de674e441339a7fce73c2d00e89 [ 692.113987] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Releasing lock "refresh_cache-2b87dfbe-2b94-4787-a795-94f8b63f651c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.114405] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 692.114603] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 692.115106] env[61273]: DEBUG oslo_concurrency.lockutils [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 service nova] Acquired lock "refresh_cache-2b87dfbe-2b94-4787-a795-94f8b63f651c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.115288] env[61273]: DEBUG nova.network.neutron [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 service nova] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Refreshing network info cache for port ea7c2700-8347-4baa-b73d-d330af13f7ab {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 692.115710] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 service nova] Expecting reply to msg b69b3cc60f2447c093d91f3ed4f82540 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 692.116553] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d6ca57c-f885-4d65-a608-f78601931fbf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.125520] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b69b3cc60f2447c093d91f3ed4f82540 [ 692.136784] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6157c59d-fce6-4d59-bb36-3ba137fb5649 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.160312] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2b87dfbe-2b94-4787-a795-94f8b63f651c could not be found. [ 692.160872] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 692.161080] env[61273]: INFO nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 692.161418] env[61273]: DEBUG oslo.service.loopingcall [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.163806] env[61273]: DEBUG nova.compute.manager [-] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 692.163911] env[61273]: DEBUG nova.network.neutron [-] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 692.191458] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599ec2fd-dbfa-4231-9071-84115efb5e0d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.199075] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54d25c6-91d8-4176-a193-aab51626a269 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.234297] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a138b2e5-190d-4515-bb95-0673b1753314 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.242280] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f81d81f-2198-4035-ab6e-50fb9f69cefd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.258997] env[61273]: DEBUG nova.compute.provider_tree [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.259810] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 31fdd16e93404b9e9d0b12f2f24fc8b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 692.267228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31fdd16e93404b9e9d0b12f2f24fc8b1 [ 692.347721] env[61273]: DEBUG nova.network.neutron [-] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 692.348373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6b95e6406e3d45f2af4482891ee643b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 692.358373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b95e6406e3d45f2af4482891ee643b0 [ 692.652822] env[61273]: DEBUG nova.network.neutron [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 service nova] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 692.722949] env[61273]: DEBUG nova.network.neutron [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 service nova] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.723508] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 service nova] Expecting reply to msg 9ca3fa6362074161a42814e95f3f718f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 692.732189] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ca3fa6362074161a42814e95f3f718f [ 692.762797] env[61273]: DEBUG nova.scheduler.client.report [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 692.765323] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 2e7810990d284fd6846ab07b461ada10 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 692.777932] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e7810990d284fd6846ab07b461ada10 [ 692.850514] env[61273]: DEBUG nova.network.neutron [-] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.851016] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 
7dc8cad32bf44be694876c93be828cdf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 692.864530] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dc8cad32bf44be694876c93be828cdf [ 693.196487] env[61273]: DEBUG nova.compute.manager [req-771b4895-3181-4bf2-85df-3cb46b731eb8 req-d31b24d4-0ce3-45e0-a58e-c2d9dc9b4873 service nova] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Received event network-vif-deleted-ea7c2700-8347-4baa-b73d-d330af13f7ab {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 693.225903] env[61273]: DEBUG oslo_concurrency.lockutils [req-f12f7b21-a081-447d-b6f5-86741d868ab7 req-8f105141-954d-43ef-b4c5-e14c9f4001f4 service nova] Releasing lock "refresh_cache-2b87dfbe-2b94-4787-a795-94f8b63f651c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.268060] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.268598] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 693.270289] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 3f9e7102ceeb41f1bd4a3c0ce8823bc8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 693.271385] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.320s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.273230] env[61273]: INFO nova.compute.claims [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.274685] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 76d67e7670ac4ea2b2c1809bb7a96da3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 693.333755] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f9e7102ceeb41f1bd4a3c0ce8823bc8 [ 693.343357] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76d67e7670ac4ea2b2c1809bb7a96da3 [ 693.353867] env[61273]: INFO nova.compute.manager [-] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Took 1.19 seconds to deallocate network for instance. 
[ 693.356060] env[61273]: DEBUG nova.compute.claims [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 693.356234] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.777443] env[61273]: DEBUG nova.compute.utils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 693.778131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 75dea2afb2e54434bd2678eb2c991332 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 693.780202] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg eabaafd1ef63475caf86958db56daa9e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 693.781150] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 693.781423] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 693.788654] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eabaafd1ef63475caf86958db56daa9e [ 693.801319] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75dea2afb2e54434bd2678eb2c991332 [ 693.848617] env[61273]: DEBUG nova.policy [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dca60306b0944a8ba1bfd91c00439602', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2a584ced06549eaa9449580059e9b1c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 694.199478] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Successfully created port: 9d5ceb62-1624-4a37-9b8b-42997a4fb38c {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.282038] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 694.283797] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg aedd4dd9d4154f6baf508cd3d1fc46b9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 694.357647] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aedd4dd9d4154f6baf508cd3d1fc46b9 [ 694.703253] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446dcfa1-4955-43b1-b838-91b9f68dd25f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.710857] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5058ae-91e5-417b-9439-6e0abda000ba {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.749754] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e80951e-e2e1-40ec-b7c0-914e2c252cdf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.760037] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63036ac-8a98-43b1-a3b3-89bf3540eeda {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.778741] env[61273]: DEBUG nova.compute.provider_tree [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.779298] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 64c795d7308c42c583a49dd7c1edb3dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 694.791558] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 75cde58bddf84b9f8d4feb961d340e6b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 694.798421] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64c795d7308c42c583a49dd7c1edb3dc [ 694.845102] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75cde58bddf84b9f8d4feb961d340e6b [ 695.282540] env[61273]: DEBUG nova.scheduler.client.report [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 695.285138] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 4ac41fa903f94efcbdaa87b20fb73d5a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 695.294529] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 695.297750] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ac41fa903f94efcbdaa87b20fb73d5a [ 695.316388] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 695.316613] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 695.316771] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 695.316946] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 695.317083] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 695.317222] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 695.317418] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 695.317568] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 695.317727] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 695.317881] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 695.318044] env[61273]: DEBUG nova.virt.hardware [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 695.318886] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e00133c-3595-4428-be47-ea766bf98053 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.332377] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e547f2-b563-4984-823a-06195e48ac84 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.732461] env[61273]: DEBUG nova.compute.manager [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Received event network-changed-9d5ceb62-1624-4a37-9b8b-42997a4fb38c {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 695.732683] env[61273]: DEBUG nova.compute.manager [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Refreshing instance network info cache due to event network-changed-9d5ceb62-1624-4a37-9b8b-42997a4fb38c. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 695.732911] env[61273]: DEBUG oslo_concurrency.lockutils [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] Acquiring lock "refresh_cache-e2560c8e-61c6-4343-82cb-47dc5b1997fb" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.733052] env[61273]: DEBUG oslo_concurrency.lockutils [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] Acquired lock "refresh_cache-e2560c8e-61c6-4343-82cb-47dc5b1997fb" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.733208] env[61273]: DEBUG nova.network.neutron [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Refreshing network info cache for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 695.733620] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] Expecting reply to msg ffc13cdcdc164a80902cfb36a709b996 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 695.740500] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffc13cdcdc164a80902cfb36a709b996 [ 695.791240] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.791240] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg f451b05690f54e58917da39fc7e886aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 695.791240] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.023s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.791240] env[61273]: INFO nova.compute.claims [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.793036] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg d6de911d52ff4a8f81eefef99e068a13 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 695.809209] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f451b05690f54e58917da39fc7e886aa [ 695.879097] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
d6de911d52ff4a8f81eefef99e068a13 [ 695.909868] env[61273]: ERROR nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c, please check neutron logs for more information. [ 695.909868] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 695.909868] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 695.909868] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 695.909868] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 695.909868] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 695.909868] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 695.909868] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 695.909868] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.909868] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 695.909868] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.909868] env[61273]: ERROR nova.compute.manager raise self.value [ 695.909868] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 695.909868] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 695.909868] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.909868] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 695.910370] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.910370] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 695.910370] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c, please check neutron logs for more information. 
[ 695.910370] env[61273]: ERROR nova.compute.manager [ 695.910370] env[61273]: Traceback (most recent call last): [ 695.910370] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 695.910370] env[61273]: listener.cb(fileno) [ 695.910370] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.910370] env[61273]: result = function(*args, **kwargs) [ 695.910370] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 695.910370] env[61273]: return func(*args, **kwargs) [ 695.910370] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 695.910370] env[61273]: raise e [ 695.910370] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 695.910370] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 695.910370] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 695.910370] env[61273]: created_port_ids = self._update_ports_for_instance( [ 695.910370] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 695.910370] env[61273]: with excutils.save_and_reraise_exception(): [ 695.910370] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.910370] env[61273]: self.force_reraise() [ 695.910370] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.910370] env[61273]: raise self.value [ 695.910370] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 695.910370] env[61273]: updated_port = self._update_port( [ 695.910370] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.910370] env[61273]: _ensure_no_port_binding_failure(port) [ 695.910370] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.910370] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 695.911419] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c, please check neutron logs for more information. [ 695.911419] env[61273]: Removing descriptor: 19 [ 695.911419] env[61273]: ERROR nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c, please check neutron logs for more information. 
[ 695.911419] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Traceback (most recent call last): [ 695.911419] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 695.911419] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] yield resources [ 695.911419] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 695.911419] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self.driver.spawn(context, instance, image_meta, [ 695.911419] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 695.911419] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.911419] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 695.911419] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] vm_ref = self.build_virtual_machine(instance, [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] for vif in network_info: [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] return self._sync_wrapper(fn, *args, **kwargs) [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self.wait() [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self[:] = self._gt.wait() [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] return self._exit_event.wait() [ 695.911834] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 695.912296] env[61273]: ERROR 
nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] result = hub.switch() [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] return self.greenlet.switch() [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] result = function(*args, **kwargs) [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] return func(*args, **kwargs) [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] raise e [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] nwinfo = self.network_api.allocate_for_instance( [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 695.912296] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] created_port_ids = self._update_ports_for_instance( [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] with excutils.save_and_reraise_exception(): [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self.force_reraise() [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] raise self.value [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] updated_port = self._update_port( [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.912682] 
env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] _ensure_no_port_binding_failure(port) [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.912682] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] raise exception.PortBindingFailed(port_id=port['id']) [ 695.913104] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] nova.exception.PortBindingFailed: Binding failed for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c, please check neutron logs for more information. [ 695.913104] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] [ 695.913104] env[61273]: INFO nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Terminating instance [ 695.920141] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Acquiring lock "refresh_cache-e2560c8e-61c6-4343-82cb-47dc5b1997fb" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.967838] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Acquiring lock "0b400fe1-d0d0-4820-9f56-56ccbad5465a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.968483] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Lock "0b400fe1-d0d0-4820-9f56-56ccbad5465a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.254152] env[61273]: DEBUG nova.network.neutron [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 696.296936] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 1cc717d69d794d5396dd0f42e2011a3b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 696.298329] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Acquiring lock "2eb90c88-8554-4955-87de-3767f4a114fe" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.298591] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Lock "2eb90c88-8554-4955-87de-3767f4a114fe" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.299005] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 580b8d3d124f46d0b3cfd94de601418a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 696.305748] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 580b8d3d124f46d0b3cfd94de601418a [ 696.306678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cc717d69d794d5396dd0f42e2011a3b [ 696.474958] env[61273]: DEBUG nova.network.neutron [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.475494] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] Expecting reply to msg 8f72ae0f681d4860852e6cfbd32673d7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 696.484744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f72ae0f681d4860852e6cfbd32673d7 [ 696.802098] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Lock "2eb90c88-8554-4955-87de-3767f4a114fe" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.802642] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 696.804450] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 3a5ab290f3704528a3b72d5f962797fd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 696.863205] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a5ab290f3704528a3b72d5f962797fd [ 696.977858] env[61273]: DEBUG oslo_concurrency.lockutils [req-9e9d3ceb-ae57-49d4-88ee-fd65e0530994 req-8a840810-c980-4ee6-b1d8-d32bb84a68dc service nova] Releasing lock "refresh_cache-e2560c8e-61c6-4343-82cb-47dc5b1997fb" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.983670] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Acquired lock "refresh_cache-e2560c8e-61c6-4343-82cb-47dc5b1997fb" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.983670] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 696.983670] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg d28fefec88274f3a8de44ea721c980d4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 696.988608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d28fefec88274f3a8de44ea721c980d4 [ 697.221236] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07856368-2600-4c65-a031-e9f8f3befc28 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.229089] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bc45a5-99fb-4805-9f97-0676af5a346f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.260302] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab12b8b-d259-4341-bda4-76a735d1ddfd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.268435] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e5269b-810e-4086-95bb-9c6646924b2a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.280821] env[61273]: DEBUG nova.compute.provider_tree [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.281358] 
env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 447ced291c414f07abcabc11fcfb0347 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 697.288634] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 447ced291c414f07abcabc11fcfb0347 [ 697.316534] env[61273]: DEBUG nova.compute.utils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 697.316534] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 5d8eca7da0be489daa29d3137ba7a47d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 697.316534] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 697.316534] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 697.324508] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d8eca7da0be489daa29d3137ba7a47d [ 697.374186] env[61273]: DEBUG nova.policy [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c29f650cbd91401d995341c03f795e88', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '683850b1cecd48ad9939566a9745c1aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 697.507004] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 697.613949] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.614784] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 81d6af9c53e944979245265340d4f538 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 697.624440] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81d6af9c53e944979245265340d4f538 [ 697.762792] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Successfully created port: cd73e761-b035-4816-a3f2-ac6f23681fc7 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 697.784243] env[61273]: DEBUG nova.scheduler.client.report [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 697.786905] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 1c1e496c8a9e4432a7bae0bae2fa6d42 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 697.802671] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c1e496c8a9e4432a7bae0bae2fa6d42 [ 697.818537] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 697.820248] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 986b9e8771224506b6f0a6552798a053 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 697.860167] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 986b9e8771224506b6f0a6552798a053 [ 698.116893] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Releasing lock "refresh_cache-e2560c8e-61c6-4343-82cb-47dc5b1997fb" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.117334] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 698.117640] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 698.117843] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f781cd03-8e91-42b9-ab9a-4564e4f202af {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.127301] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561e6e67-4c3f-468b-9f20-5403632ded94 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.146568] env[61273]: DEBUG nova.compute.manager [req-40094169-6dd4-4a27-8292-5bd4e4315103 req-01e55ab5-9b0d-4012-8e74-98d719824ec3 service nova] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Received event network-vif-deleted-9d5ceb62-1624-4a37-9b8b-42997a4fb38c {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 698.154013] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e2560c8e-61c6-4343-82cb-47dc5b1997fb could not be found. 
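Note: the PortBindingFailed errors recorded above come from Nova's port-binding sanity check, _ensure_no_port_binding_failure() in nova/network/neutron.py (line 294 in the tracebacks). The following is a minimal, simplified sketch of that check for reference only; it reuses the names shown in the tracebacks but is an illustration rather than the actual Nova module, and the 'binding_failed' constant value is an assumption based on Nova's network model.

    # Simplified sketch of the check that raises the PortBindingFailed seen above.
    # Assumption: Neutron reports the bound VIF type in the port's
    # 'binding:vif_type' field and sets it to 'binding_failed' when no
    # mechanism driver could bind the port on the target host.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict returned by Neutron's port-update call.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

When this check fires inside _allocate_network_async, the spawn aborts with the traceback above, the compute node's resource claim is released, and the instance is torn down (Terminating instance, then Deallocating network), which is the sequence that follows in this log.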
[ 698.154218] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 698.154609] env[61273]: INFO nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 698.154834] env[61273]: DEBUG oslo.service.loopingcall [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 698.155094] env[61273]: DEBUG nova.compute.manager [-] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 698.155199] env[61273]: DEBUG nova.network.neutron [-] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 698.170052] env[61273]: DEBUG nova.network.neutron [-] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.170613] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c5e9ddbd17804f5899a3d0ec7a76b937 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 698.177950] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5e9ddbd17804f5899a3d0ec7a76b937 [ 698.289613] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.289995] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 698.291983] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 18bd055f3c354c78992cbfbf13f4036f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 698.293581] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.554s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.299383] env[61273]: INFO nova.compute.claims [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.301828] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 5d1a9c95e4944cc89b9539a8a60b5158 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 698.324477] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 45e727bb85454934b0b4d9c6d9206b0c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 698.353360] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d1a9c95e4944cc89b9539a8a60b5158 [ 698.361154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18bd055f3c354c78992cbfbf13f4036f [ 698.381085] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45e727bb85454934b0b4d9c6d9206b0c [ 698.676104] env[61273]: DEBUG nova.network.neutron [-] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.676104] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 07f479e2c531464788ef8275c813291a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 698.682639] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07f479e2c531464788ef8275c813291a [ 698.795807] env[61273]: DEBUG nova.compute.utils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 698.796499] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 15c1a8e17d9842c2ba7b423fe9f8c39c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 698.797836] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 
tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 698.798009] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 698.805442] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg a9e0ee4498a144a9858aaa3cca8210b9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 698.816888] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15c1a8e17d9842c2ba7b423fe9f8c39c [ 698.819752] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9e0ee4498a144a9858aaa3cca8210b9 [ 698.829782] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 698.856184] env[61273]: DEBUG nova.policy [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e785a722b76c42ac91c04e9e9958183f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '039a458fcf5642eaae5d4353f569d660', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 698.857654] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 698.857896] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 
tempest-ServerGroupTestJSON-900251174-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 698.858110] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 698.859003] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 698.859003] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 698.859003] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 698.859003] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 698.859003] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 698.859230] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 698.859230] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 698.859298] env[61273]: DEBUG nova.virt.hardware [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 698.860479] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ada209-4c3f-49e6-9a42-e9504b63ce29 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.868111] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-655fed72-87ee-4177-bf42-a3bbc28e5613 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.149323] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "e6108eed-93b4-40a5-a61b-67aa5bbe2fda" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.149549] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "e6108eed-93b4-40a5-a61b-67aa5bbe2fda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.176553] env[61273]: INFO nova.compute.manager [-] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Took 1.02 seconds to deallocate network for instance. [ 699.181575] env[61273]: DEBUG nova.compute.claims [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 699.181825] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.197409] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Successfully created port: e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 699.254129] env[61273]: ERROR nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cd73e761-b035-4816-a3f2-ac6f23681fc7, please check neutron logs for more information. 
[ 699.254129] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 699.254129] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 699.254129] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 699.254129] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 699.254129] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 699.254129] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 699.254129] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 699.254129] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.254129] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 699.254129] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.254129] env[61273]: ERROR nova.compute.manager raise self.value [ 699.254129] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 699.254129] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 699.254129] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.254129] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 699.254916] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.254916] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 699.254916] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cd73e761-b035-4816-a3f2-ac6f23681fc7, please check neutron logs for more information. 
[ 699.254916] env[61273]: ERROR nova.compute.manager [ 699.254916] env[61273]: Traceback (most recent call last): [ 699.254916] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 699.254916] env[61273]: listener.cb(fileno) [ 699.254916] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 699.254916] env[61273]: result = function(*args, **kwargs) [ 699.254916] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 699.254916] env[61273]: return func(*args, **kwargs) [ 699.254916] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 699.254916] env[61273]: raise e [ 699.254916] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 699.254916] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 699.254916] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 699.254916] env[61273]: created_port_ids = self._update_ports_for_instance( [ 699.254916] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 699.254916] env[61273]: with excutils.save_and_reraise_exception(): [ 699.254916] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.254916] env[61273]: self.force_reraise() [ 699.254916] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.254916] env[61273]: raise self.value [ 699.254916] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 699.254916] env[61273]: updated_port = self._update_port( [ 699.254916] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.254916] env[61273]: _ensure_no_port_binding_failure(port) [ 699.254916] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.254916] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 699.256343] env[61273]: nova.exception.PortBindingFailed: Binding failed for port cd73e761-b035-4816-a3f2-ac6f23681fc7, please check neutron logs for more information. [ 699.256343] env[61273]: Removing descriptor: 19 [ 699.256343] env[61273]: ERROR nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cd73e761-b035-4816-a3f2-ac6f23681fc7, please check neutron logs for more information. 
[ 699.256343] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Traceback (most recent call last): [ 699.256343] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 699.256343] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] yield resources [ 699.256343] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 699.256343] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self.driver.spawn(context, instance, image_meta, [ 699.256343] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 699.256343] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self._vmops.spawn(context, instance, image_meta, injected_files, [ 699.256343] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 699.256343] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] vm_ref = self.build_virtual_machine(instance, [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] vif_infos = vmwarevif.get_vif_info(self._session, [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] for vif in network_info: [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] return self._sync_wrapper(fn, *args, **kwargs) [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self.wait() [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self[:] = self._gt.wait() [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] return self._exit_event.wait() [ 699.256928] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 699.257543] env[61273]: ERROR 
nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] result = hub.switch() [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] return self.greenlet.switch() [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] result = function(*args, **kwargs) [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] return func(*args, **kwargs) [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] raise e [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] nwinfo = self.network_api.allocate_for_instance( [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 699.257543] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] created_port_ids = self._update_ports_for_instance( [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] with excutils.save_and_reraise_exception(): [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self.force_reraise() [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] raise self.value [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] updated_port = self._update_port( [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.258160] 
env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] _ensure_no_port_binding_failure(port) [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.258160] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] raise exception.PortBindingFailed(port_id=port['id']) [ 699.258957] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] nova.exception.PortBindingFailed: Binding failed for port cd73e761-b035-4816-a3f2-ac6f23681fc7, please check neutron logs for more information. [ 699.258957] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] [ 699.258957] env[61273]: INFO nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Terminating instance [ 699.258957] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Acquiring lock "refresh_cache-3635532a-2af3-4ef5-a922-37fc763c9708" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.258957] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Acquired lock "refresh_cache-3635532a-2af3-4ef5-a922-37fc763c9708" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.258957] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 699.258957] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 84badfc3652d4d4985be1f38e24872db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 699.265364] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84badfc3652d4d4985be1f38e24872db [ 699.301602] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 699.303388] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 861ca095766b43fb9cdb9ca791006034 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 699.336061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 861ca095766b43fb9cdb9ca791006034 [ 699.661365] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf863918-faa2-49fc-9dc7-31d26fdf9cab {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.669108] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7671d96d-2284-49ff-8b13-d622f7fbadd1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.699853] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b21df4-7388-472a-a6ff-6851b55cd33c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.707080] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367471fd-d998-49b7-9e86-adbfc7d456a4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.720044] env[61273]: DEBUG nova.compute.provider_tree [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.720563] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg d9de061ab1e24ce89f1c306feae72b46 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 699.727419] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9de061ab1e24ce89f1c306feae72b46 [ 699.796075] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.806259] env[61273]: INFO nova.virt.block_device [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Booting with volume 770432a3-4ca1-4717-9890-c964bd2d2419 at /dev/sda [ 699.852608] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10878aad-5191-412a-855c-1d90a6e45873 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.861247] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871cc0f1-dc6b-4fb7-8ffc-51b9ea1bb24f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.884700] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-372e62bb-fc4c-4916-8004-a443730dd592 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.894649] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7b440f-a031-4632-a064-b61d11a6ef0c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.907234] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.908071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg b1f8ad0a4beb4a489c421c41cd9f8fe5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 699.913157] env[61273]: DEBUG nova.compute.manager [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Received event network-changed-e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 699.913512] env[61273]: DEBUG nova.compute.manager [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Refreshing instance network info cache due to event network-changed-e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 699.913874] env[61273]: DEBUG oslo_concurrency.lockutils [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] Acquiring lock "refresh_cache-144c3c21-b18e-4997-a241-8ff21a3b4835" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.914166] env[61273]: DEBUG oslo_concurrency.lockutils [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] Acquired lock "refresh_cache-144c3c21-b18e-4997-a241-8ff21a3b4835" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.914536] env[61273]: DEBUG nova.network.neutron [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Refreshing network info cache for port e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 699.915061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] Expecting reply to msg f19dee6e48c14e5ab92e2d9d93d16ba6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 699.921947] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1f8ad0a4beb4a489c421c41cd9f8fe5 [ 699.922520] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f19dee6e48c14e5ab92e2d9d93d16ba6 [ 699.928225] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181647e3-8bc4-4e98-b4a9-39965053cde8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.935168] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359a2bff-0809-40bc-bf02-f4c31f88e6ff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.948682] env[61273]: DEBUG nova.virt.block_device [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Updating existing volume attachment record: 8942fc23-3d28-4b18-a8c9-56af0c2915b3 {{(pid=61273) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 700.048721] env[61273]: ERROR nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4, please check neutron logs for more information. 
[ 700.048721] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 700.048721] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 700.048721] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 700.048721] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 700.048721] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 700.048721] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 700.048721] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 700.048721] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 700.048721] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 700.048721] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 700.048721] env[61273]: ERROR nova.compute.manager raise self.value [ 700.048721] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 700.048721] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 700.048721] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 700.048721] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 700.049520] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 700.049520] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 700.049520] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4, please check neutron logs for more information. 
[ 700.049520] env[61273]: ERROR nova.compute.manager [ 700.049520] env[61273]: Traceback (most recent call last): [ 700.049520] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 700.049520] env[61273]: listener.cb(fileno) [ 700.049520] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 700.049520] env[61273]: result = function(*args, **kwargs) [ 700.049520] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 700.049520] env[61273]: return func(*args, **kwargs) [ 700.049520] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 700.049520] env[61273]: raise e [ 700.049520] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 700.049520] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 700.049520] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 700.049520] env[61273]: created_port_ids = self._update_ports_for_instance( [ 700.049520] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 700.049520] env[61273]: with excutils.save_and_reraise_exception(): [ 700.049520] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 700.049520] env[61273]: self.force_reraise() [ 700.049520] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 700.049520] env[61273]: raise self.value [ 700.049520] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 700.049520] env[61273]: updated_port = self._update_port( [ 700.049520] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 700.049520] env[61273]: _ensure_no_port_binding_failure(port) [ 700.049520] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 700.049520] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 700.050466] env[61273]: nova.exception.PortBindingFailed: Binding failed for port e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4, please check neutron logs for more information. [ 700.050466] env[61273]: Removing descriptor: 15 [ 700.172576] env[61273]: DEBUG nova.compute.manager [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Received event network-changed-cd73e761-b035-4816-a3f2-ac6f23681fc7 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 700.172778] env[61273]: DEBUG nova.compute.manager [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Refreshing instance network info cache due to event network-changed-cd73e761-b035-4816-a3f2-ac6f23681fc7. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 700.173032] env[61273]: DEBUG oslo_concurrency.lockutils [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] Acquiring lock "refresh_cache-3635532a-2af3-4ef5-a922-37fc763c9708" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.223187] env[61273]: DEBUG nova.scheduler.client.report [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 700.225713] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 12d16ac2502f469a9184ef39bb27233c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 700.239491] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12d16ac2502f469a9184ef39bb27233c [ 700.416653] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Releasing lock "refresh_cache-3635532a-2af3-4ef5-a922-37fc763c9708" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.417106] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 700.417300] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 700.417683] env[61273]: DEBUG oslo_concurrency.lockutils [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] Acquired lock "refresh_cache-3635532a-2af3-4ef5-a922-37fc763c9708" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.417869] env[61273]: DEBUG nova.network.neutron [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Refreshing network info cache for port cd73e761-b035-4816-a3f2-ac6f23681fc7 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 700.418298] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] Expecting reply to msg a0ed4e003716448191a0587dfbe988fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 700.421082] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d70b4b0e-a96b-4305-a057-c4152cc230d2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.425815] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0ed4e003716448191a0587dfbe988fc [ 700.430252] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bf885b-822b-4dcb-944b-86b9c3a8d71a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.440415] env[61273]: DEBUG nova.network.neutron [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 700.453907] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3635532a-2af3-4ef5-a922-37fc763c9708 could not be found. [ 700.454123] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 700.454298] env[61273]: INFO nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 700.454531] env[61273]: DEBUG oslo.service.loopingcall [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.454767] env[61273]: DEBUG nova.compute.manager [-] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 700.454889] env[61273]: DEBUG nova.network.neutron [-] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 700.469770] env[61273]: DEBUG nova.network.neutron [-] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 700.469770] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d576b3962bd84d7e81fdb6633e1df63b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 700.477194] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d576b3962bd84d7e81fdb6633e1df63b [ 700.501057] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 261a09edc55b45a3920ff6c100f94e90 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 700.504572] env[61273]: DEBUG nova.network.neutron [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.505017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] Expecting reply to msg 8bfe1cefd5874978ae49c3f97712d077 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 700.512707] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 261a09edc55b45a3920ff6c100f94e90 [ 700.513146] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bfe1cefd5874978ae49c3f97712d077 [ 700.728371] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.728782] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 700.730630] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 0fa90ff0149447279e922cd5b7d790b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 700.731657] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.992s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.733193] env[61273]: INFO nova.compute.claims [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.735268] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg b1b870bb0b94487ba6b8e1feeaba71fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 700.779117] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fa90ff0149447279e922cd5b7d790b0 [ 700.781238] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1b870bb0b94487ba6b8e1feeaba71fc [ 700.938668] env[61273]: DEBUG nova.network.neutron [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 700.971974] env[61273]: DEBUG nova.network.neutron [-] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.972465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 06942901c9ca4d898e05220d3a26b86c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 700.981513] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06942901c9ca4d898e05220d3a26b86c [ 701.007266] env[61273]: DEBUG oslo_concurrency.lockutils [req-acf4f053-f2a6-495f-a4ef-23c37a928362 req-8351422c-5308-47d2-bffd-186f78f69242 service nova] Releasing lock "refresh_cache-144c3c21-b18e-4997-a241-8ff21a3b4835" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.037186] env[61273]: DEBUG nova.network.neutron [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.037717] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] Expecting reply to msg 95bb5a03ff594559bfc3b319d976c866 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 701.041687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg b60206dcc0e4420c956f4cd24802c0d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 701.046696] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95bb5a03ff594559bfc3b319d976c866 [ 701.050354] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b60206dcc0e4420c956f4cd24802c0d2 [ 701.238947] env[61273]: DEBUG nova.compute.utils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 701.239620] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg a20cc5ee65ab4bc19688d89954027e7e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 701.241755] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg e193087d30e44ef6b48a560794f5d209 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 701.242829] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 701.242994] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 701.249666] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e193087d30e44ef6b48a560794f5d209 [ 701.252593] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a20cc5ee65ab4bc19688d89954027e7e [ 701.280359] env[61273]: DEBUG nova.policy [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3174ce41afd40c188eb376297330d8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '233511ca341849e3a8d33ee8152f429a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 701.474978] env[61273]: INFO nova.compute.manager [-] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Took 1.02 seconds to deallocate network for instance. [ 701.477479] env[61273]: DEBUG nova.compute.claims [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 701.477645] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.539978] env[61273]: DEBUG oslo_concurrency.lockutils [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] Releasing lock "refresh_cache-3635532a-2af3-4ef5-a922-37fc763c9708" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 701.540310] env[61273]: DEBUG nova.compute.manager [req-bbdfa1f6-f8e6-4943-b7e3-4c10edbe459d req-56509ea7-8e81-4347-90d9-b01b6b5e183b service nova] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Received event network-vif-deleted-cd73e761-b035-4816-a3f2-ac6f23681fc7 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 701.545585] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 13f4867de9dd4e05aacf268316770c70 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 701.562537] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 
tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Successfully created port: 469e6b69-bd24-49e1-a6b3-a58c8e848eba {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.578842] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13f4867de9dd4e05aacf268316770c70 [ 701.744404] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 701.746321] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 6320be1ba9a14022b604ad332779d24f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 701.790979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6320be1ba9a14022b604ad332779d24f [ 702.008846] env[61273]: DEBUG nova.compute.manager [req-be457953-1c1b-4afe-9f0d-f3310630cd55 req-98099589-a6fb-43c8-928a-06a99799d3f4 service nova] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Received event network-vif-deleted-e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 702.048491] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 702.048998] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 702.049253] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 702.049362] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.049525] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 702.049699] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.049821] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 702.049994] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 702.050159] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 702.050396] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] 
Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 702.050573] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 702.050687] env[61273]: DEBUG nova.virt.hardware [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 702.053859] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab78e033-e1fe-48a3-94a8-dfa668eca49d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.062208] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cc8b25-5252-4732-b94f-74701412fb65 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.075585] env[61273]: ERROR nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4, please check neutron logs for more information. 
[ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Traceback (most recent call last): [ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] yield resources [ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self.driver.spawn(context, instance, image_meta, [ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] vm_ref = self.build_virtual_machine(instance, [ 702.075585] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] vif_infos = vmwarevif.get_vif_info(self._session, [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] for vif in network_info: [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] return self._sync_wrapper(fn, *args, **kwargs) [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self.wait() [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self[:] = self._gt.wait() [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] return self._exit_event.wait() [ 702.076090] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 702.076090] env[61273]: ERROR 
nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] current.throw(*self._exc) [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] result = function(*args, **kwargs) [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] return func(*args, **kwargs) [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] raise e [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] nwinfo = self.network_api.allocate_for_instance( [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] created_port_ids = self._update_ports_for_instance( [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] with excutils.save_and_reraise_exception(): [ 702.076511] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self.force_reraise() [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] raise self.value [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] updated_port = self._update_port( [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] _ensure_no_port_binding_failure(port) [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] raise exception.PortBindingFailed(port_id=port['id']) [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] nova.exception.PortBindingFailed: Binding failed for port e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4, please check neutron logs for more information. [ 702.076976] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] [ 702.076976] env[61273]: INFO nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Terminating instance [ 702.080439] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Acquiring lock "refresh_cache-144c3c21-b18e-4997-a241-8ff21a3b4835" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.080604] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Acquired lock "refresh_cache-144c3c21-b18e-4997-a241-8ff21a3b4835" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.080771] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 702.081178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg ea0f50d9ef6b45c6b4aa9fb8f66de058 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 702.088575] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea0f50d9ef6b45c6b4aa9fb8f66de058 [ 702.176925] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea1f83d-90ea-415a-aab1-ef1710944b47 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.184835] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa71146-5cab-49e6-9b4f-d01800e30fca {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.215395] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf2d9a5-a2c6-4828-a8be-2db11df75d1b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.222549] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a133da42-48bf-48f2-9ff3-d3428f4959de {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.235862] env[61273]: DEBUG nova.compute.provider_tree [None 
req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.236373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 1ba74e7c3bde43ff9dda4fdf048dcf72 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 702.243647] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ba74e7c3bde43ff9dda4fdf048dcf72 [ 702.260035] env[61273]: INFO nova.virt.block_device [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Booting with volume 4602f78a-d619-4e07-8958-2029cb61c217 at /dev/sda [ 702.296753] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af3afcf4-3405-4f8f-a09f-b6e1b2ff80df {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.305438] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffe2150-19cd-43ed-ae4d-6ba07e53f3dc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.333961] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df142408-4be2-43db-960a-f81794fe43c6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.341572] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117f6d9e-1703-4b11-941c-d963000e74d5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.362926] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5fe5aa-b37f-4ae3-9f2b-057f869a1370 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.369375] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805de1cf-ae19-497d-9122-042f07f7d396 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.382926] env[61273]: DEBUG nova.virt.block_device [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Updating existing volume attachment record: baf3a147-1844-4f03-9b94-f8915669e405 {{(pid=61273) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 702.395469] env[61273]: ERROR nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 469e6b69-bd24-49e1-a6b3-a58c8e848eba, please check neutron logs for more information. 
[ 702.395469] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 702.395469] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.395469] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 702.395469] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.395469] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 702.395469] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.395469] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 702.395469] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.395469] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 702.395469] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.395469] env[61273]: ERROR nova.compute.manager raise self.value [ 702.395469] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.395469] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 702.395469] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.395469] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 702.396091] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.396091] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 702.396091] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 469e6b69-bd24-49e1-a6b3-a58c8e848eba, please check neutron logs for more information. 
[ 702.396091] env[61273]: ERROR nova.compute.manager [ 702.396091] env[61273]: Traceback (most recent call last): [ 702.396091] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 702.396091] env[61273]: listener.cb(fileno) [ 702.396091] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 702.396091] env[61273]: result = function(*args, **kwargs) [ 702.396091] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.396091] env[61273]: return func(*args, **kwargs) [ 702.396091] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 702.396091] env[61273]: raise e [ 702.396091] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.396091] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 702.396091] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.396091] env[61273]: created_port_ids = self._update_ports_for_instance( [ 702.396091] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.396091] env[61273]: with excutils.save_and_reraise_exception(): [ 702.396091] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.396091] env[61273]: self.force_reraise() [ 702.396091] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.396091] env[61273]: raise self.value [ 702.396091] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.396091] env[61273]: updated_port = self._update_port( [ 702.396091] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.396091] env[61273]: _ensure_no_port_binding_failure(port) [ 702.396091] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.396091] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 702.396971] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 469e6b69-bd24-49e1-a6b3-a58c8e848eba, please check neutron logs for more information. [ 702.396971] env[61273]: Removing descriptor: 15 [ 702.597811] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 702.666255] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.666802] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 83c5a1dcda65472d975e7bb8febc0fd8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 702.675032] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83c5a1dcda65472d975e7bb8febc0fd8 [ 702.738982] env[61273]: DEBUG nova.scheduler.client.report [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 702.741560] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg c615262f8a60446ab4176b0fc1c43e70 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 702.762183] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c615262f8a60446ab4176b0fc1c43e70 [ 702.925103] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 118fdfa17035455cb1a7d13bf0149b8e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 702.935948] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 118fdfa17035455cb1a7d13bf0149b8e [ 703.169163] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Releasing lock "refresh_cache-144c3c21-b18e-4997-a241-8ff21a3b4835" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.169691] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 703.170034] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3558fc64-c94c-4899-ae35-58e83cfd39f2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.183010] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c45955-5ddc-4c4c-960f-ec751757c874 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.202844] env[61273]: WARNING nova.virt.vmwareapi.driver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 144c3c21-b18e-4997-a241-8ff21a3b4835 could not be found. [ 703.203082] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 703.203363] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e01b46a-c5a5-457d-977b-2ba088b58636 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.210990] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d83d26-e7c0-458d-ae0c-e68b0ab9f54a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.231608] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 144c3c21-b18e-4997-a241-8ff21a3b4835 could not be found. [ 703.231608] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 703.231608] env[61273]: INFO nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Took 0.06 seconds to destroy the instance on the hypervisor. [ 703.231877] env[61273]: DEBUG oslo.service.loopingcall [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.232136] env[61273]: DEBUG nova.compute.manager [-] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 703.232232] env[61273]: DEBUG nova.network.neutron [-] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.244774] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.245256] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 703.247015] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 8ffa16a049164e7083511ca9ebfbee2f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 703.248453] env[61273]: DEBUG nova.network.neutron [-] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.248904] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dc4e048375f247308076f2c14f3ac025 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 703.249652] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.342s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.251741] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg f5fbac0bde1e40b48bd49351d319ef76 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 703.269435] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc4e048375f247308076f2c14f3ac025 [ 703.313121] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ffa16a049164e7083511ca9ebfbee2f [ 703.321774] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5fbac0bde1e40b48bd49351d319ef76 [ 703.461125] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 8f539181e88844e3943599afc118fbb7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 703.471681] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f539181e88844e3943599afc118fbb7 [ 703.754981] env[61273]: DEBUG nova.compute.utils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.755611] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 210c84b4508a4043a6671d21591a3571 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 703.756935] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 703.757101] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 703.759037] env[61273]: DEBUG nova.network.neutron [-] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.759366] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7ea3663bd3454b8bb21209452058ee6d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 703.773245] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 210c84b4508a4043a6671d21591a3571 [ 703.773804] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ea3663bd3454b8bb21209452058ee6d [ 703.825930] env[61273]: DEBUG nova.policy [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '438a167fe61344bc9371e3b42d5344c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6ac2fa4041e4f24bee18db89143d7dc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 703.964948] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 611bda70ae114c4e8475fb0f5f51494e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 704.003419] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 611bda70ae114c4e8475fb0f5f51494e [ 704.097655] env[61273]: DEBUG nova.compute.manager [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Received event network-changed-469e6b69-bd24-49e1-a6b3-a58c8e848eba {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 704.097855] env[61273]: DEBUG nova.compute.manager [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Refreshing instance network info cache due to event network-changed-469e6b69-bd24-49e1-a6b3-a58c8e848eba. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 704.098066] env[61273]: DEBUG oslo_concurrency.lockutils [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] Acquiring lock "refresh_cache-8d63e0a8-85a1-400b-a6f0-8e87c7945655" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.098248] env[61273]: DEBUG oslo_concurrency.lockutils [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] Acquired lock "refresh_cache-8d63e0a8-85a1-400b-a6f0-8e87c7945655" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.098427] env[61273]: DEBUG nova.network.neutron [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Refreshing network info cache for port 469e6b69-bd24-49e1-a6b3-a58c8e848eba {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 704.098842] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] Expecting reply to msg 27c2cb68e252457f86ffba4577f628af in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 704.106765] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27c2cb68e252457f86ffba4577f628af [ 704.140729] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eac5fce-1d21-4ba6-9031-a069f1c903ec {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.145721] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Successfully created port: 2d7689b9-6f2a-4729-b658-80c4feb01295 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 704.150236] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca54a460-330f-4d33-a0f8-6af9d348518f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.189510] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2959a475-0d38-4ebd-adc7-936931ac51eb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.196845] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca100351-0f24-434d-896d-f9885f9efa40 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.209473] env[61273]: DEBUG nova.compute.provider_tree [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.209961] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 
tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg 25d0cc67e91d4852a7ff0ef35d82c045 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 704.217435] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25d0cc67e91d4852a7ff0ef35d82c045 [ 704.259727] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 704.261536] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 0f2ab7983e46458591267df53881e385 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 704.264143] env[61273]: INFO nova.compute.manager [-] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Took 1.03 seconds to deallocate network for instance. [ 704.302210] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f2ab7983e46458591267df53881e385 [ 704.468448] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 704.468988] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.469229] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.469401] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.469583] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.469729] env[61273]: DEBUG 
nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.469874] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.470073] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.470230] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.470395] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.470625] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.470812] env[61273]: DEBUG nova.virt.hardware [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.471751] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3324c1c-46f3-40f6-9bef-52213fba9cf7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.480212] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c661cc5-1f52-46ea-81ad-c75b11ea7641 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.494826] env[61273]: ERROR nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 469e6b69-bd24-49e1-a6b3-a58c8e848eba, please check neutron logs for more information. 
[ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Traceback (most recent call last): [ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] yield resources [ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self.driver.spawn(context, instance, image_meta, [ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] vm_ref = self.build_virtual_machine(instance, [ 704.494826] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] for vif in network_info: [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] return self._sync_wrapper(fn, *args, **kwargs) [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self.wait() [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self[:] = self._gt.wait() [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] return self._exit_event.wait() [ 704.495246] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 704.495246] env[61273]: ERROR 
nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] current.throw(*self._exc) [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] result = function(*args, **kwargs) [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] return func(*args, **kwargs) [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] raise e [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] nwinfo = self.network_api.allocate_for_instance( [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] created_port_ids = self._update_ports_for_instance( [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] with excutils.save_and_reraise_exception(): [ 704.495627] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self.force_reraise() [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] raise self.value [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] updated_port = self._update_port( [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] _ensure_no_port_binding_failure(port) [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] raise exception.PortBindingFailed(port_id=port['id']) [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] nova.exception.PortBindingFailed: Binding failed for port 469e6b69-bd24-49e1-a6b3-a58c8e848eba, please check neutron logs for more information. [ 704.496288] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] [ 704.496288] env[61273]: INFO nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Terminating instance [ 704.498187] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Acquiring lock "refresh_cache-8d63e0a8-85a1-400b-a6f0-8e87c7945655" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.625908] env[61273]: DEBUG nova.network.neutron [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 704.693448] env[61273]: DEBUG nova.network.neutron [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.694034] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] Expecting reply to msg e420d37344024e1db6e869154eec6424 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 704.703217] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e420d37344024e1db6e869154eec6424 [ 704.711972] env[61273]: DEBUG nova.scheduler.client.report [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 704.721800] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg af483be93d68423b810b5389df4f0a66 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 704.743677] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af483be93d68423b810b5389df4f0a66 [ 704.766641] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 3e873c5348044121a0c5411b091f3f65 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 704.806884] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e873c5348044121a0c5411b091f3f65 [ 704.833116] env[61273]: INFO nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Took 0.57 seconds to detach 1 volumes for instance. [ 704.835722] env[61273]: DEBUG nova.compute.claims [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 704.836324] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.901410] env[61273]: ERROR nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2d7689b9-6f2a-4729-b658-80c4feb01295, please check neutron logs for more information. 
[ 704.901410] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 704.901410] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.901410] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 704.901410] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.901410] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 704.901410] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.901410] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 704.901410] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.901410] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 704.901410] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.901410] env[61273]: ERROR nova.compute.manager raise self.value [ 704.901410] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.901410] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 704.901410] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.901410] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 704.902020] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.902020] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 704.902020] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2d7689b9-6f2a-4729-b658-80c4feb01295, please check neutron logs for more information. 
[ 704.902020] env[61273]: ERROR nova.compute.manager [ 704.902444] env[61273]: Traceback (most recent call last): [ 704.902564] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 704.902564] env[61273]: listener.cb(fileno) [ 704.902665] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.902665] env[61273]: result = function(*args, **kwargs) [ 704.902939] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.902939] env[61273]: return func(*args, **kwargs) [ 704.903049] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.903049] env[61273]: raise e [ 704.903130] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.903130] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 704.903214] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.903214] env[61273]: created_port_ids = self._update_ports_for_instance( [ 704.903294] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.903294] env[61273]: with excutils.save_and_reraise_exception(): [ 704.903370] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.903370] env[61273]: self.force_reraise() [ 704.903447] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.903447] env[61273]: raise self.value [ 704.903526] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.903526] env[61273]: updated_port = self._update_port( [ 704.903604] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.903604] env[61273]: _ensure_no_port_binding_failure(port) [ 704.903684] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.903684] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 704.903775] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 2d7689b9-6f2a-4729-b658-80c4feb01295, please check neutron logs for more information. 
[ 704.903834] env[61273]: Removing descriptor: 15 [ 705.196501] env[61273]: DEBUG oslo_concurrency.lockutils [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] Releasing lock "refresh_cache-8d63e0a8-85a1-400b-a6f0-8e87c7945655" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.196789] env[61273]: DEBUG nova.compute.manager [req-19c70215-e128-4580-8702-fc2c1325b268 req-83e9d550-0c03-402b-898b-61c8511050b4 service nova] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Received event network-vif-deleted-469e6b69-bd24-49e1-a6b3-a58c8e848eba {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 705.197141] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Acquired lock "refresh_cache-8d63e0a8-85a1-400b-a6f0-8e87c7945655" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.197311] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 705.197735] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg e5b55cd2ee364ad48254ddc5d875ee05 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 705.204716] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5b55cd2ee364ad48254ddc5d875ee05 [ 705.223921] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.974s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.224580] env[61273]: ERROR nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 967f4a13-789b-471b-ab8f-72b7480a5afc, please check neutron logs for more information. 
[ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Traceback (most recent call last): [ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self.driver.spawn(context, instance, image_meta, [ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self._vmops.spawn(context, instance, image_meta, injected_files, [ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] vm_ref = self.build_virtual_machine(instance, [ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] vif_infos = vmwarevif.get_vif_info(self._session, [ 705.224580] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] for vif in network_info: [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] return self._sync_wrapper(fn, *args, **kwargs) [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self.wait() [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self[:] = self._gt.wait() [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] return self._exit_event.wait() [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] result = hub.switch() [ 705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
705.224968] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] return self.greenlet.switch() [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] result = function(*args, **kwargs) [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] return func(*args, **kwargs) [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] raise e [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] nwinfo = self.network_api.allocate_for_instance( [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] created_port_ids = self._update_ports_for_instance( [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] with excutils.save_and_reraise_exception(): [ 705.225334] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] self.force_reraise() [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] raise self.value [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] updated_port = self._update_port( [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] _ensure_no_port_binding_failure(port) [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] raise exception.PortBindingFailed(port_id=port['id']) [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] nova.exception.PortBindingFailed: Binding failed for port 967f4a13-789b-471b-ab8f-72b7480a5afc, please check neutron logs for more information. [ 705.225715] env[61273]: ERROR nova.compute.manager [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] [ 705.226045] env[61273]: DEBUG nova.compute.utils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Binding failed for port 967f4a13-789b-471b-ab8f-72b7480a5afc, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 705.226412] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.883s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.228571] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 96db22b5b60d4c5aaa684d1b049cb4b9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 705.229714] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Build of instance 12c47e99-faf4-4083-a46f-4e33c451e980 was re-scheduled: Binding failed for port 967f4a13-789b-471b-ab8f-72b7480a5afc, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 705.230186] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 705.230446] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Acquiring lock "refresh_cache-12c47e99-faf4-4083-a46f-4e33c451e980" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.230631] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Acquired lock "refresh_cache-12c47e99-faf4-4083-a46f-4e33c451e980" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.230825] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 705.231234] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg a12d4326f5c74aed98bac345155b3a21 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 705.236687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a12d4326f5c74aed98bac345155b3a21 [ 705.263001] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96db22b5b60d4c5aaa684d1b049cb4b9 [ 705.269214] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 705.293838] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 705.294210] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 705.294388] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.294587] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 705.294734] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.294885] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 705.295086] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 705.295242] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 705.295406] 
env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 705.295564] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 705.295735] env[61273]: DEBUG nova.virt.hardware [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 705.296617] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6e0051-c3b8-445e-814c-a1e5d5e94d9d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.304858] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3538c92f-a50f-4f3d-aeef-083f8d4a1304 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.318409] env[61273]: ERROR nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2d7689b9-6f2a-4729-b658-80c4feb01295, please check neutron logs for more information. 
[ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Traceback (most recent call last): [ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] yield resources [ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self.driver.spawn(context, instance, image_meta, [ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] vm_ref = self.build_virtual_machine(instance, [ 705.318409] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] vif_infos = vmwarevif.get_vif_info(self._session, [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] for vif in network_info: [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] return self._sync_wrapper(fn, *args, **kwargs) [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self.wait() [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self[:] = self._gt.wait() [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] return self._exit_event.wait() [ 705.318853] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 705.318853] env[61273]: ERROR 
nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] current.throw(*self._exc) [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] result = function(*args, **kwargs) [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] return func(*args, **kwargs) [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] raise e [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] nwinfo = self.network_api.allocate_for_instance( [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] created_port_ids = self._update_ports_for_instance( [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] with excutils.save_and_reraise_exception(): [ 705.319322] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self.force_reraise() [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] raise self.value [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] updated_port = self._update_port( [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] _ensure_no_port_binding_failure(port) [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] raise exception.PortBindingFailed(port_id=port['id']) [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] nova.exception.PortBindingFailed: Binding failed for port 2d7689b9-6f2a-4729-b658-80c4feb01295, please check neutron logs for more information. [ 705.319771] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] [ 705.319771] env[61273]: INFO nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Terminating instance [ 705.320741] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "refresh_cache-bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.320871] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquired lock "refresh_cache-bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.321075] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 705.321435] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg b82b3ec27bfc4a448f03fadcb2a22365 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 705.328040] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b82b3ec27bfc4a448f03fadcb2a22365 [ 705.715552] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.757182] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.805595] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.806133] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 68541901e16240909ed4f7d346c2c8cd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 705.815121] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68541901e16240909ed4f7d346c2c8cd [ 705.839201] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.848411] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.848859] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg cc433014b3844dd3b8b1bab263dd5201 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 705.856761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc433014b3844dd3b8b1bab263dd5201 [ 705.910127] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.910979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 0c1a2b084bfb45bd9c0e63488889797f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 705.919899] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c1a2b084bfb45bd9c0e63488889797f [ 706.053601] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726379db-a728-4486-9490-f42cc17955b7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.061366] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70a1754-ad17-429b-b036-2ae6145fdde4 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.091400] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49da72e2-4d6c-48a5-a333-ad5ff7e3faeb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.098107] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b44bc0-6bb7-45ae-b8f9-7fc1c04d52c3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.110561] env[61273]: DEBUG nova.compute.provider_tree [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.111086] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg d8714ab4f6294c2896999f3b853e1dfa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 706.117835] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8714ab4f6294c2896999f3b853e1dfa [ 706.171026] env[61273]: DEBUG nova.compute.manager [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Received event network-changed-2d7689b9-6f2a-4729-b658-80c4feb01295 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 706.171280] env[61273]: DEBUG nova.compute.manager [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Refreshing instance network info cache due to event network-changed-2d7689b9-6f2a-4729-b658-80c4feb01295. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 706.171398] env[61273]: DEBUG oslo_concurrency.lockutils [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] Acquiring lock "refresh_cache-bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.311821] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Releasing lock "refresh_cache-8d63e0a8-85a1-400b-a6f0-8e87c7945655" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.312416] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 706.312774] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc04ac2e-c881-4e13-a61d-01cd36f8dc4a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.321549] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fedf63-ba58-4a10-bb3f-3618621d03f1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.344045] env[61273]: WARNING nova.virt.vmwareapi.driver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 8d63e0a8-85a1-400b-a6f0-8e87c7945655 could not be found. [ 706.344276] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 706.344550] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4aac0096-cd48-4d1c-8389-0861e1c1cf09 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.351781] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a2c850-cce7-43a9-ab96-23b25d11ea84 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.363456] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Releasing lock "refresh_cache-12c47e99-faf4-4083-a46f-4e33c451e980" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.363601] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 706.363787] env[61273]: DEBUG nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 706.363945] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 706.376856] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8d63e0a8-85a1-400b-a6f0-8e87c7945655 could not be found. [ 706.377080] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 706.377260] env[61273]: INFO nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Took 0.06 seconds to destroy the instance on the hypervisor. [ 706.377504] env[61273]: DEBUG oslo.service.loopingcall [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 706.377723] env[61273]: DEBUG nova.compute.manager [-] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 706.377818] env[61273]: DEBUG nova.network.neutron [-] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 706.379886] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.380401] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg 5ff0857e3bde4f87bfb75c64611a8a7f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 706.387971] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ff0857e3bde4f87bfb75c64611a8a7f [ 706.394960] env[61273]: DEBUG nova.network.neutron [-] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.396029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c3f9500285014e0a868b647663136fa0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 706.402300] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3f9500285014e0a868b647663136fa0 [ 706.414157] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Releasing lock "refresh_cache-bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.414381] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 706.414571] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 706.414863] env[61273]: DEBUG oslo_concurrency.lockutils [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] Acquired lock "refresh_cache-bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.415029] env[61273]: DEBUG nova.network.neutron [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Refreshing network info cache for port 2d7689b9-6f2a-4729-b658-80c4feb01295 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 706.415447] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] Expecting reply to msg 865d1aca2d0c4fb9af2e8ec4df7faf62 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 706.416181] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff5422bc-fae2-4613-bb29-07713fb9aa42 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.421684] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 865d1aca2d0c4fb9af2e8ec4df7faf62 [ 
706.426391] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f1771f-1700-425b-a9e5-9b6047298fa2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.450423] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf40cc8c-4729-49c5-8c9d-e3ee09606aa5 could not be found. [ 706.450699] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 706.450938] env[61273]: INFO nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 706.451220] env[61273]: DEBUG oslo.service.loopingcall [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 706.451704] env[61273]: DEBUG nova.compute.manager [-] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 706.451852] env[61273]: DEBUG nova.network.neutron [-] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 706.467695] env[61273]: DEBUG nova.network.neutron [-] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.468276] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f07f787fea5f4d2fa2af9d4822b1370b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 706.474887] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f07f787fea5f4d2fa2af9d4822b1370b [ 706.614040] env[61273]: DEBUG nova.scheduler.client.report [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.616470] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 5c870fe0e95140fe952a1fe44d2f4a04 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 706.628225] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c870fe0e95140fe952a1fe44d2f4a04 [ 706.883232] env[61273]: DEBUG nova.network.neutron [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.883840] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg aecf9192310c489e8196df7b6701c77c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 706.891909] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aecf9192310c489e8196df7b6701c77c [ 706.896908] env[61273]: DEBUG nova.network.neutron [-] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.897260] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2af04c3d99f94ac7984206deef7375ab in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 706.904544] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2af04c3d99f94ac7984206deef7375ab [ 706.934507] env[61273]: DEBUG nova.network.neutron [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.971094] env[61273]: DEBUG nova.network.neutron [-] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.971519] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dbf434fbb7ce416fb394acb4803f887b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 706.981231] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbf434fbb7ce416fb394acb4803f887b [ 706.996885] env[61273]: DEBUG nova.network.neutron [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.997402] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] Expecting reply to msg a170e393801e4bd482ef63510661197c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 707.005322] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a170e393801e4bd482ef63510661197c [ 707.119302] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.893s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.120141] env[61273]: ERROR nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf, please check neutron logs for more information. 
[ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Traceback (most recent call last): [ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self.driver.spawn(context, instance, image_meta, [ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self._vmops.spawn(context, instance, image_meta, injected_files, [ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] vm_ref = self.build_virtual_machine(instance, [ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] vif_infos = vmwarevif.get_vif_info(self._session, [ 707.120141] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] for vif in network_info: [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] return self._sync_wrapper(fn, *args, **kwargs) [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self.wait() [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self[:] = self._gt.wait() [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] return self._exit_event.wait() [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] result = hub.switch() [ 707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
707.120509] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] return self.greenlet.switch() [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] result = function(*args, **kwargs) [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] return func(*args, **kwargs) [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] raise e [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] nwinfo = self.network_api.allocate_for_instance( [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] created_port_ids = self._update_ports_for_instance( [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] with excutils.save_and_reraise_exception(): [ 707.120884] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] self.force_reraise() [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] raise self.value [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] updated_port = self._update_port( [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] _ensure_no_port_binding_failure(port) [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] raise exception.PortBindingFailed(port_id=port['id']) [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] nova.exception.PortBindingFailed: Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf, please check neutron logs for more information. [ 707.121263] env[61273]: ERROR nova.compute.manager [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] [ 707.121634] env[61273]: DEBUG nova.compute.utils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 707.122115] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.769s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.123920] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 939850f44c0f47cbad341c69970cc3e0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 707.125214] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Build of instance 5ea287cd-ba85-446d-85d0-5a050fe49f17 was re-scheduled: Binding failed for port d1633ad7-ab0c-43f3-bf09-b74926bf1ccf, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 707.125450] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 707.125672] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Acquiring lock "refresh_cache-5ea287cd-ba85-446d-85d0-5a050fe49f17" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.125821] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Acquired lock "refresh_cache-5ea287cd-ba85-446d-85d0-5a050fe49f17" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.125976] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 707.126320] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 6625dcab70c44564b3703d98d6ab64f6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 707.132600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6625dcab70c44564b3703d98d6ab64f6 [ 707.164438] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 939850f44c0f47cbad341c69970cc3e0 [ 707.388714] env[61273]: INFO nova.compute.manager [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] [instance: 12c47e99-faf4-4083-a46f-4e33c451e980] Took 1.02 seconds to deallocate network for instance. [ 707.390472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg b2099ceec6bd41f69fea41464c0f16f5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 707.399326] env[61273]: INFO nova.compute.manager [-] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Took 1.02 seconds to deallocate network for instance. [ 707.425074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2099ceec6bd41f69fea41464c0f16f5 [ 707.473614] env[61273]: INFO nova.compute.manager [-] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Took 1.02 seconds to deallocate network for instance. 
[ 707.475971] env[61273]: DEBUG nova.compute.claims [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 707.476207] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.499694] env[61273]: DEBUG oslo_concurrency.lockutils [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] Releasing lock "refresh_cache-bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.499694] env[61273]: DEBUG nova.compute.manager [req-c225173b-31de-47f6-99c4-62eb7b574b34 req-5a0dee19-ac3b-4c99-bdb9-1cf748da7da8 service nova] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Received event network-vif-deleted-2d7689b9-6f2a-4729-b658-80c4feb01295 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 707.652775] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 707.736850] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.737392] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg d552461bc9c34102ae11592ddbc04c7e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 707.746847] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d552461bc9c34102ae11592ddbc04c7e [ 707.894772] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg c598829afecb4a61a0fac27e0fc4cf58 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 707.928683] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8779aa4a-0262-453f-9956-df77946a0764 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.931701] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c598829afecb4a61a0fac27e0fc4cf58 [ 707.937053] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b5dbd2-4611-4442-9f19-60b7f55f3731 
{{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.969840] env[61273]: INFO nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Took 0.57 seconds to detach 1 volumes for instance. [ 707.972121] env[61273]: DEBUG nova.compute.claims [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 707.972232] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.972999] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98058183-d980-47a8-9a02-d7284bf64c7f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.980288] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ef7074-2309-4ea9-9139-bd1f1d496425 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.993187] env[61273]: DEBUG nova.compute.provider_tree [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.993685] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg a0bcc0e282f94978a577d5580f3824dd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 708.000667] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0bcc0e282f94978a577d5580f3824dd [ 708.242597] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Releasing lock "refresh_cache-5ea287cd-ba85-446d-85d0-5a050fe49f17" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.242597] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 708.242597] env[61273]: DEBUG nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 708.242597] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 708.258492] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 708.258492] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 4b50e98b416840f4891fc4550a4b70c2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 708.265399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b50e98b416840f4891fc4550a4b70c2 [ 708.422480] env[61273]: INFO nova.scheduler.client.report [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Deleted allocations for instance 12c47e99-faf4-4083-a46f-4e33c451e980 [ 708.425012] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Expecting reply to msg 8e4c47c49e2e4485871b46754ecd11aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 708.442254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e4c47c49e2e4485871b46754ecd11aa [ 708.499936] env[61273]: DEBUG nova.scheduler.client.report [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.499936] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg ee7aeac4abe64bb5a821f33de2562e56 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 708.513306] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
ee7aeac4abe64bb5a821f33de2562e56 [ 708.760234] env[61273]: DEBUG nova.network.neutron [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.760785] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 796c46e5b658402880724f84dab59f1a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 708.769549] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 796c46e5b658402880724f84dab59f1a [ 708.927322] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c6e2e821-28f8-4d2a-88df-ec57e567ef48 tempest-FloatingIPsAssociationTestJSON-885379146 tempest-FloatingIPsAssociationTestJSON-885379146-project-member] Lock "12c47e99-faf4-4083-a46f-4e33c451e980" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.107s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.927932] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg e8126de21732473da06a29dadb3dd1a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 708.937594] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8126de21732473da06a29dadb3dd1a4 [ 709.002537] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.880s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.003258] env[61273]: ERROR nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268, please check neutron logs for more information. 
[ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] Traceback (most recent call last): [ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self.driver.spawn(context, instance, image_meta, [ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] vm_ref = self.build_virtual_machine(instance, [ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] vif_infos = vmwarevif.get_vif_info(self._session, [ 709.003258] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] for vif in network_info: [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] return self._sync_wrapper(fn, *args, **kwargs) [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self.wait() [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self[:] = self._gt.wait() [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] return self._exit_event.wait() [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] result = hub.switch() [ 709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
709.003770] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] return self.greenlet.switch() [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] result = function(*args, **kwargs) [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] return func(*args, **kwargs) [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] raise e [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] nwinfo = self.network_api.allocate_for_instance( [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] created_port_ids = self._update_ports_for_instance( [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] with excutils.save_and_reraise_exception(): [ 709.004246] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] self.force_reraise() [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] raise self.value [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] updated_port = self._update_port( [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] _ensure_no_port_binding_failure(port) [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] raise exception.PortBindingFailed(port_id=port['id']) [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] nova.exception.PortBindingFailed: Binding failed for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268, please check neutron logs for more information. [ 709.004677] env[61273]: ERROR nova.compute.manager [instance: 297a5546-6159-462c-a436-032d94855c00] [ 709.005159] env[61273]: DEBUG nova.compute.utils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Binding failed for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 709.005204] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.860s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.006619] env[61273]: INFO nova.compute.claims [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.008166] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg ccaaeda897074893811f2b46c9eb0623 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 709.013291] env[61273]: DEBUG nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Build of instance 297a5546-6159-462c-a436-032d94855c00 was re-scheduled: Binding failed for port 230ecbbd-5bca-48b4-9f6f-7b581a49f268, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 709.013291] env[61273]: DEBUG nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 709.013291] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Acquiring lock "refresh_cache-297a5546-6159-462c-a436-032d94855c00" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.013291] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Acquired lock "refresh_cache-297a5546-6159-462c-a436-032d94855c00" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.013767] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 709.013767] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg f4cf9c5580294897aa5dfe1264c8be59 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 709.019344] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4cf9c5580294897aa5dfe1264c8be59 [ 709.054687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ccaaeda897074893811f2b46c9eb0623 [ 709.262856] env[61273]: INFO nova.compute.manager [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] [instance: 5ea287cd-ba85-446d-85d0-5a050fe49f17] Took 1.02 seconds to deallocate network for instance. [ 709.264779] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 2c4fc75d6f704749b49246848eedeef7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 709.304161] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c4fc75d6f704749b49246848eedeef7 [ 709.430368] env[61273]: DEBUG nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 709.432643] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 5f7e3358ff494216853f63ff9ee57afb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 709.467368] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f7e3358ff494216853f63ff9ee57afb [ 709.514288] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 2269f5cbd1034499b36ba5536bc1ea7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 709.522945] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2269f5cbd1034499b36ba5536bc1ea7a [ 709.531836] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 709.670449] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.671014] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 9761564cdb4f45cb98120807c9c67409 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 709.679504] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9761564cdb4f45cb98120807c9c67409 [ 709.769699] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg b23643f044eb4b1d95c1c8c50b31909a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 709.819044] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b23643f044eb4b1d95c1c8c50b31909a [ 709.952945] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.176038] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Releasing lock "refresh_cache-297a5546-6159-462c-a436-032d94855c00" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.176038] env[61273]: DEBUG nova.compute.manager [None 
req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 710.176038] env[61273]: DEBUG nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 710.176038] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 710.197217] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 710.197893] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 34360c80f4ec454296c8dad6fdc7882b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 710.206076] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34360c80f4ec454296c8dad6fdc7882b [ 710.296942] env[61273]: INFO nova.scheduler.client.report [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Deleted allocations for instance 5ea287cd-ba85-446d-85d0-5a050fe49f17 [ 710.305207] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Expecting reply to msg 15e345636f4b48c18b79219ad1822304 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 710.321578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15e345636f4b48c18b79219ad1822304 [ 710.411533] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1b14ff-3e4d-4f8b-a79b-b00a52bc3cd7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.418964] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469c6576-dd4a-4837-a08f-28b1e347d6b3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.452387] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2807e9a6-b835-4660-874e-7266bf59b5e5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.460347] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-18ef6831-d6ac-413d-800f-ba8ddb24d602 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.473702] env[61273]: DEBUG nova.compute.provider_tree [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.474343] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 454291c6297c41be9f1647810a9dafc4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 710.482953] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 454291c6297c41be9f1647810a9dafc4 [ 710.699865] env[61273]: DEBUG nova.network.neutron [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.700385] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 83705f3c61b746fdafa43df589cf5d87 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 710.710307] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83705f3c61b746fdafa43df589cf5d87 [ 710.807741] env[61273]: DEBUG oslo_concurrency.lockutils [None req-258cae5c-7377-4c04-81a0-3d6e8152b08a tempest-ServerActionsTestJSON-129923650 tempest-ServerActionsTestJSON-129923650-project-member] Lock "5ea287cd-ba85-446d-85d0-5a050fe49f17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.202s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.808354] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg afcc458456074c7bb2ad1252c3986fb7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 710.821409] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afcc458456074c7bb2ad1252c3986fb7 [ 710.977071] env[61273]: DEBUG nova.scheduler.client.report [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 710.980184] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 31519997714148ce8b50e6b1ed54f2e7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 710.994875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31519997714148ce8b50e6b1ed54f2e7 [ 711.202959] env[61273]: INFO nova.compute.manager [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] [instance: 297a5546-6159-462c-a436-032d94855c00] Took 1.03 seconds to deallocate network for instance. [ 711.204772] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg d0e9af1524f7465aa817074bc4aa6218 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 711.238547] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0e9af1524f7465aa817074bc4aa6218 [ 711.311657] env[61273]: DEBUG nova.compute.manager [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 711.313511] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 3e9717afc46e4277840b277bcbd0b3af in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 711.347420] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e9717afc46e4277840b277bcbd0b3af [ 711.482009] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.482589] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 711.484400] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 9bd6c55ff6474e17a80dc2cb2fe5ab37 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 711.485438] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.129s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.488031] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 60cebca942184677b5ced6558f857cac in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 711.518069] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bd6c55ff6474e17a80dc2cb2fe5ab37 [ 711.522276] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60cebca942184677b5ced6558f857cac [ 711.709273] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg 93669a38b4164a23a5071bb960605c4f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 711.740365] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93669a38b4164a23a5071bb960605c4f [ 711.835612] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.990229] env[61273]: DEBUG nova.compute.utils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 711.990871] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 11f76cc1aca74a8dbfc4f3129edb89d5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 711.994872] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 711.995047] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 712.007202] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11f76cc1aca74a8dbfc4f3129edb89d5 [ 712.073908] env[61273]: DEBUG nova.policy [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc86d52877f44ed9818e3455d93bacfc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6c92f944a2b4fb88208d093a86b5a93', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 712.239497] env[61273]: INFO nova.scheduler.client.report [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Deleted allocations for instance 297a5546-6159-462c-a436-032d94855c00 [ 712.254742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Expecting reply to msg cc25fe3b6b5a4e87abb4905ab9d10001 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 712.270932] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc25fe3b6b5a4e87abb4905ab9d10001 [ 712.315077] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccd1598-122a-4d3d-a881-c913fc0d9091 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.322941] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6277eb3-7ada-4416-a4ee-0ff78a418bfb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.357398] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34fd911-4378-4887-b802-7779106e0683 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.366035] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4705c86e-43fc-4c26-8e5f-46960258b063 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.380652] env[61273]: DEBUG nova.compute.provider_tree [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.381193] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 6aff98a89fa44278a9fdbcaf6f4cd5c1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 712.390561] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6aff98a89fa44278a9fdbcaf6f4cd5c1 [ 712.495785] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 712.497517] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg c9357c1d8ecc442dbf9764a41c8bcff4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 712.532652] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9357c1d8ecc442dbf9764a41c8bcff4 [ 712.606106] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Successfully created port: f1a08c1d-bfd9-47a7-9221-49193552bcba {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.758485] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d4a7b16-5b36-4499-ab71-25e57615bc92 tempest-ServersNegativeTestJSON-1808660442 tempest-ServersNegativeTestJSON-1808660442-project-member] Lock "297a5546-6159-462c-a436-032d94855c00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 134.915s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.759078] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 447153fb547d4abbafd48e869cba9f14 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 712.768809] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 447153fb547d4abbafd48e869cba9f14 [ 712.883870] env[61273]: DEBUG nova.scheduler.client.report [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.886299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 
f795bc2298ac476790d90c2d3bd9c222 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 712.898431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f795bc2298ac476790d90c2d3bd9c222 [ 713.005038] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg b03b487eca554647b4f003b8fe8fc342 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 713.049759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b03b487eca554647b4f003b8fe8fc342 [ 713.261743] env[61273]: DEBUG nova.compute.manager [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 713.263495] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 562509139eb544fdbf814720bf127022 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 713.305083] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 562509139eb544fdbf814720bf127022 [ 713.389388] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.904s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.390089] env[61273]: ERROR nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ea7c2700-8347-4baa-b73d-d330af13f7ab, please check neutron logs for more information. 
[ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Traceback (most recent call last): [ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self.driver.spawn(context, instance, image_meta, [ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] vm_ref = self.build_virtual_machine(instance, [ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] vif_infos = vmwarevif.get_vif_info(self._session, [ 713.390089] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] for vif in network_info: [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] return self._sync_wrapper(fn, *args, **kwargs) [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self.wait() [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self[:] = self._gt.wait() [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] return self._exit_event.wait() [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] result = hub.switch() [ 713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
713.390541] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] return self.greenlet.switch() [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] result = function(*args, **kwargs) [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] return func(*args, **kwargs) [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] raise e [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] nwinfo = self.network_api.allocate_for_instance( [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] created_port_ids = self._update_ports_for_instance( [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] with excutils.save_and_reraise_exception(): [ 713.390988] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] self.force_reraise() [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] raise self.value [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] updated_port = self._update_port( [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] _ensure_no_port_binding_failure(port) [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] raise exception.PortBindingFailed(port_id=port['id']) [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] nova.exception.PortBindingFailed: Binding failed for port ea7c2700-8347-4baa-b73d-d330af13f7ab, please check neutron logs for more information. [ 713.391430] env[61273]: ERROR nova.compute.manager [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] [ 713.391863] env[61273]: DEBUG nova.compute.utils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Binding failed for port ea7c2700-8347-4baa-b73d-d330af13f7ab, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 713.392044] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.210s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.393834] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg eefaf3e108f04f5d9194686ef67b7a17 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 713.394996] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Build of instance 2b87dfbe-2b94-4787-a795-94f8b63f651c was re-scheduled: Binding failed for port ea7c2700-8347-4baa-b73d-d330af13f7ab, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 713.395413] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 713.395631] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquiring lock "refresh_cache-2b87dfbe-2b94-4787-a795-94f8b63f651c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.395796] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Acquired lock "refresh_cache-2b87dfbe-2b94-4787-a795-94f8b63f651c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.395968] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 713.396354] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 97b6e336eeba49a28e288794de15d93e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 713.408924] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97b6e336eeba49a28e288794de15d93e [ 713.431557] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eefaf3e108f04f5d9194686ef67b7a17 [ 713.504751] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 713.525401] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 713.525653] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 713.525900] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 713.525978] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 713.526115] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 713.526254] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 713.526451] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 713.526600] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 713.526758] env[61273]: DEBUG 
nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 713.526914] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 713.527078] env[61273]: DEBUG nova.virt.hardware [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 713.527942] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe19110-ccd2-4fc8-8280-53eda479871d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.536199] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b651020-ad3f-410d-b508-68a14921da1c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.785651] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.918571] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.008173] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.008717] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 9a9252d0be554e918c3e2cfb2dfbf2df in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 714.018815] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a9252d0be554e918c3e2cfb2dfbf2df [ 714.242871] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34e4c70-a1b5-4c26-a95e-65421c5d221e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.251768] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c65431-167f-48fb-b735-bef867bc32b3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.281475] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60901fc-ec3c-4bd8-bed3-8e6881df45be {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.288761] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f76fe39-2822-4c78-a4e0-29a6e82482b5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.301932] env[61273]: DEBUG nova.compute.provider_tree [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.302454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg ebaddbc53a3a4208ae1f2dfeed0eb4f1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 714.310914] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebaddbc53a3a4208ae1f2dfeed0eb4f1 [ 714.392371] env[61273]: DEBUG nova.compute.manager [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Received event network-changed-f1a08c1d-bfd9-47a7-9221-49193552bcba {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 714.393388] env[61273]: DEBUG nova.compute.manager [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Refreshing instance network info cache due to event network-changed-f1a08c1d-bfd9-47a7-9221-49193552bcba. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 714.393714] env[61273]: DEBUG oslo_concurrency.lockutils [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] Acquiring lock "refresh_cache-23774aa5-1608-495f-8015-29e25f856c69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.393995] env[61273]: DEBUG oslo_concurrency.lockutils [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] Acquired lock "refresh_cache-23774aa5-1608-495f-8015-29e25f856c69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.394777] env[61273]: DEBUG nova.network.neutron [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Refreshing network info cache for port f1a08c1d-bfd9-47a7-9221-49193552bcba {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 714.395302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] Expecting reply to msg 6552e81ece0242589401c37e7b90ce89 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 714.402068] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6552e81ece0242589401c37e7b90ce89 [ 714.413979] env[61273]: ERROR nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f1a08c1d-bfd9-47a7-9221-49193552bcba, please check neutron logs for more information. 
[ 714.413979] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 714.413979] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.413979] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 714.413979] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 714.413979] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 714.413979] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 714.413979] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 714.413979] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.413979] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 714.413979] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.413979] env[61273]: ERROR nova.compute.manager raise self.value [ 714.413979] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 714.413979] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 714.413979] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.413979] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 714.414574] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.414574] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 714.414574] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f1a08c1d-bfd9-47a7-9221-49193552bcba, please check neutron logs for more information. 
[ 714.414574] env[61273]: ERROR nova.compute.manager [ 714.414915] env[61273]: Traceback (most recent call last): [ 714.415024] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 714.415024] env[61273]: listener.cb(fileno) [ 714.415112] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.415112] env[61273]: result = function(*args, **kwargs) [ 714.415194] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 714.415194] env[61273]: return func(*args, **kwargs) [ 714.415266] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 714.415266] env[61273]: raise e [ 714.415345] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.415345] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 714.415431] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 714.415431] env[61273]: created_port_ids = self._update_ports_for_instance( [ 714.415503] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 714.415503] env[61273]: with excutils.save_and_reraise_exception(): [ 714.415572] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.415572] env[61273]: self.force_reraise() [ 714.415640] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.415640] env[61273]: raise self.value [ 714.415708] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 714.415708] env[61273]: updated_port = self._update_port( [ 714.415783] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.415783] env[61273]: _ensure_no_port_binding_failure(port) [ 714.415854] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.415854] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 714.415934] env[61273]: nova.exception.PortBindingFailed: Binding failed for port f1a08c1d-bfd9-47a7-9221-49193552bcba, please check neutron logs for more information. [ 714.415990] env[61273]: Removing descriptor: 19 [ 714.417138] env[61273]: ERROR nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f1a08c1d-bfd9-47a7-9221-49193552bcba, please check neutron logs for more information. 
[ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] Traceback (most recent call last): [ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] yield resources [ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self.driver.spawn(context, instance, image_meta, [ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] vm_ref = self.build_virtual_machine(instance, [ 714.417138] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] vif_infos = vmwarevif.get_vif_info(self._session, [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] for vif in network_info: [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] return self._sync_wrapper(fn, *args, **kwargs) [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self.wait() [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self[:] = self._gt.wait() [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] return self._exit_event.wait() [ 714.417596] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 714.417596] env[61273]: ERROR 
nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] result = hub.switch() [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] return self.greenlet.switch() [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] result = function(*args, **kwargs) [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] return func(*args, **kwargs) [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] raise e [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] nwinfo = self.network_api.allocate_for_instance( [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] created_port_ids = self._update_ports_for_instance( [ 714.418035] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] with excutils.save_and_reraise_exception(): [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self.force_reraise() [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] raise self.value [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] updated_port = self._update_port( [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.418477] 
env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] _ensure_no_port_binding_failure(port) [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] raise exception.PortBindingFailed(port_id=port['id']) [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] nova.exception.PortBindingFailed: Binding failed for port f1a08c1d-bfd9-47a7-9221-49193552bcba, please check neutron logs for more information. [ 714.418477] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] [ 714.420374] env[61273]: INFO nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Terminating instance [ 714.422499] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Acquiring lock "refresh_cache-23774aa5-1608-495f-8015-29e25f856c69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.511148] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Releasing lock "refresh_cache-2b87dfbe-2b94-4787-a795-94f8b63f651c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.511390] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 714.511550] env[61273]: DEBUG nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 714.511902] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 714.531837] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.532707] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg bac3be1526014245aa8701802b450415 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 714.545640] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bac3be1526014245aa8701802b450415 [ 714.805557] env[61273]: DEBUG nova.scheduler.client.report [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 714.808167] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 5bc747641e96419988d370702b97786c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 714.833754] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bc747641e96419988d370702b97786c [ 714.951039] env[61273]: DEBUG nova.network.neutron [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.038403] env[61273]: DEBUG nova.network.neutron [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.038984] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 62ca57035b384a89855cff58b90e0640 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 715.047522] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62ca57035b384a89855cff58b90e0640 [ 715.071879] env[61273]: DEBUG nova.network.neutron [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.072484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] Expecting reply to msg 73867c2d7ead4f798a648f84b05b75e2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 715.082128] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73867c2d7ead4f798a648f84b05b75e2 [ 715.310750] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.919s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.311320] env[61273]: ERROR nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c, please check neutron logs for more information. 
[ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Traceback (most recent call last): [ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self.driver.spawn(context, instance, image_meta, [ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] vm_ref = self.build_virtual_machine(instance, [ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 715.311320] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] for vif in network_info: [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] return self._sync_wrapper(fn, *args, **kwargs) [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self.wait() [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self[:] = self._gt.wait() [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] return self._exit_event.wait() [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] result = hub.switch() [ 715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
715.311793] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] return self.greenlet.switch() [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] result = function(*args, **kwargs) [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] return func(*args, **kwargs) [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] raise e [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] nwinfo = self.network_api.allocate_for_instance( [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] created_port_ids = self._update_ports_for_instance( [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] with excutils.save_and_reraise_exception(): [ 715.312266] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] self.force_reraise() [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] raise self.value [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] updated_port = self._update_port( [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] _ensure_no_port_binding_failure(port) [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] raise exception.PortBindingFailed(port_id=port['id']) [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] nova.exception.PortBindingFailed: Binding failed for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c, please check neutron logs for more information. [ 715.312708] env[61273]: ERROR nova.compute.manager [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] [ 715.313081] env[61273]: DEBUG nova.compute.utils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Binding failed for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 715.313312] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.836s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.315058] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 2828ffb72e604eac8c49069b8deb831c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 715.317314] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Build of instance e2560c8e-61c6-4343-82cb-47dc5b1997fb was re-scheduled: Binding failed for port 9d5ceb62-1624-4a37-9b8b-42997a4fb38c, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 715.317754] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 715.317982] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Acquiring lock "refresh_cache-e2560c8e-61c6-4343-82cb-47dc5b1997fb" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.318127] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Acquired lock "refresh_cache-e2560c8e-61c6-4343-82cb-47dc5b1997fb" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.318285] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 715.318650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 321e89edb0b142aaaa33bf56f12f00b5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 715.332056] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 321e89edb0b142aaaa33bf56f12f00b5 [ 715.356145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2828ffb72e604eac8c49069b8deb831c [ 715.542172] env[61273]: INFO nova.compute.manager [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] [instance: 2b87dfbe-2b94-4787-a795-94f8b63f651c] Took 1.03 seconds to deallocate network for instance. 
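Every traceback in this run bottoms out at the same guard, _ensure_no_port_binding_failure in /opt/stack/nova/nova/network/neutron.py (line 294 here): Nova abandons the port as soon as Neutron reports that it could not bind it. Only the raise itself is visible in the log; the sketch below is a hedged reconstruction of that guard, and the 'binding:vif_type' key and 'binding_failed' sentinel are assumptions, not something shown above.

    # Hypothetical reconstruction of the guard hit at neutron.py:294 in the
    # tracebacks above. The field name and sentinel value are assumptions;
    # the log only shows the raise and the resulting exception message.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # 'port' is the port dict Neutron returns after the update call.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])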
[ 715.543933] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg ab1592c68ccb4bf3a3680755f49c8b00 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 715.575044] env[61273]: DEBUG oslo_concurrency.lockutils [req-e35d060b-5d96-44ee-ac29-629d0edae389 req-4db23249-241c-4dce-a235-929a5fe7bfb3 service nova] Releasing lock "refresh_cache-23774aa5-1608-495f-8015-29e25f856c69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.575435] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Acquired lock "refresh_cache-23774aa5-1608-495f-8015-29e25f856c69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.575616] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 715.576055] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 238bea8d85904977b5a6dd4843e66d6a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 715.582847] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab1592c68ccb4bf3a3680755f49c8b00 [ 715.583500] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 238bea8d85904977b5a6dd4843e66d6a [ 715.844771] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.985779] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.986723] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg ccf30cdd81f847cc8f3a9ff483638845 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 715.995951] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ccf30cdd81f847cc8f3a9ff483638845 [ 716.048541] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg 78df62af8efc4bc5ab5b57b001e83003 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 716.085224] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78df62af8efc4bc5ab5b57b001e83003 [ 716.107061] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.164717] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93166ca5-4ca0-41be-be45-8af8fac6f3da {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.173589] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd47537-583e-4ba3-b93c-743a7eb09818 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.202643] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d09750-7357-42df-9b40-e8aa2dfcc76f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.209709] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd7d49d-42eb-42db-8861-bd1a14a64bd6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.223690] env[61273]: DEBUG nova.compute.provider_tree [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.224191] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 0a89f899990d4d95af27e6869b0a6c3f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 716.230659] env[61273]: 
DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.231414] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg c319b91a4dd8489d875ca0edd558f9b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 716.232902] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a89f899990d4d95af27e6869b0a6c3f [ 716.240147] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c319b91a4dd8489d875ca0edd558f9b2 [ 716.415581] env[61273]: DEBUG nova.compute.manager [req-56071202-e871-4a6b-86e3-da8fd5b24883 req-66527354-68f3-44cb-ba8c-0a79c179f4b6 service nova] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Received event network-vif-deleted-f1a08c1d-bfd9-47a7-9221-49193552bcba {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 716.488548] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Releasing lock "refresh_cache-e2560c8e-61c6-4343-82cb-47dc5b1997fb" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.488787] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 716.488973] env[61273]: DEBUG nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 716.489139] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 716.507212] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.508095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 23c7b0e688ce425891c5dfd42bb77bb6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 716.517364] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23c7b0e688ce425891c5dfd42bb77bb6 [ 716.571993] env[61273]: INFO nova.scheduler.client.report [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Deleted allocations for instance 2b87dfbe-2b94-4787-a795-94f8b63f651c [ 716.578214] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Expecting reply to msg f8f001053e9e4ce4900841918ff2c574 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 716.596742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8f001053e9e4ce4900841918ff2c574 [ 716.726612] env[61273]: DEBUG nova.scheduler.client.report [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 716.729231] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 4ad40ec62f0840f88e0ba88f97b119f5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 716.733239] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Releasing lock "refresh_cache-23774aa5-1608-495f-8015-29e25f856c69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.733755] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 716.734044] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 716.734415] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbd2e512-1587-4bd4-8456-4f62c60eac2b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.745225] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29030d3-e1bd-4ce8-bae9-4233412cd17e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.756407] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ad40ec62f0840f88e0ba88f97b119f5 [ 716.768882] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 23774aa5-1608-495f-8015-29e25f856c69 could not be found. [ 716.769088] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 716.769268] env[61273]: INFO nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Took 0.04 seconds to destroy the instance on the hypervisor. [ 716.769511] env[61273]: DEBUG oslo.service.loopingcall [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 716.769800] env[61273]: DEBUG nova.compute.manager [-] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 716.769955] env[61273]: DEBUG nova.network.neutron [-] [instance: 23774aa5-1608-495f-8015-29e25f856c69] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 716.807023] env[61273]: DEBUG nova.network.neutron [-] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.807557] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6b989d20ebd94816ac54372214d61321 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 716.814998] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b989d20ebd94816ac54372214d61321 [ 717.010510] env[61273]: DEBUG nova.network.neutron [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.011089] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 0a1d219618724462be559ecfbd6d1ec2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 717.018993] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a1d219618724462be559ecfbd6d1ec2 [ 717.079805] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e27a361-db14-451a-b65f-741626e0d2e4 tempest-MigrationsAdminTest-1696452469 tempest-MigrationsAdminTest-1696452469-project-member] Lock "2b87dfbe-2b94-4787-a795-94f8b63f651c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.906s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.080617] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 3410db8e8e6d43008644aab6342edb52 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 717.091985] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3410db8e8e6d43008644aab6342edb52 [ 717.231530] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.918s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.235018] env[61273]: ERROR nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cd73e761-b035-4816-a3f2-ac6f23681fc7, please check neutron logs for more information. 
[ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Traceback (most recent call last): [ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self.driver.spawn(context, instance, image_meta, [ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self._vmops.spawn(context, instance, image_meta, injected_files, [ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] vm_ref = self.build_virtual_machine(instance, [ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] vif_infos = vmwarevif.get_vif_info(self._session, [ 717.235018] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] for vif in network_info: [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] return self._sync_wrapper(fn, *args, **kwargs) [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self.wait() [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self[:] = self._gt.wait() [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] return self._exit_event.wait() [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] result = hub.switch() [ 717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
717.235424] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] return self.greenlet.switch() [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] result = function(*args, **kwargs) [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] return func(*args, **kwargs) [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] raise e [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] nwinfo = self.network_api.allocate_for_instance( [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] created_port_ids = self._update_ports_for_instance( [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] with excutils.save_and_reraise_exception(): [ 717.235820] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] self.force_reraise() [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] raise self.value [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] updated_port = self._update_port( [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] _ensure_no_port_binding_failure(port) [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] raise exception.PortBindingFailed(port_id=port['id']) [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] nova.exception.PortBindingFailed: Binding failed for port cd73e761-b035-4816-a3f2-ac6f23681fc7, please check neutron logs for more information. [ 717.236223] env[61273]: ERROR nova.compute.manager [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] [ 717.236551] env[61273]: DEBUG nova.compute.utils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Binding failed for port cd73e761-b035-4816-a3f2-ac6f23681fc7, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 717.236551] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.398s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.236551] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 2c13a57f9a334464aba55c9efdf36ec2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 717.245521] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Build of instance 3635532a-2af3-4ef5-a922-37fc763c9708 was re-scheduled: Binding failed for port cd73e761-b035-4816-a3f2-ac6f23681fc7, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 717.245521] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 717.245521] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Acquiring lock "refresh_cache-3635532a-2af3-4ef5-a922-37fc763c9708" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.245521] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Acquired lock "refresh_cache-3635532a-2af3-4ef5-a922-37fc763c9708" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.246046] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 717.246046] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg eda9f69d0a3f4a8ea87e3b2d1a4d28b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 717.246046] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eda9f69d0a3f4a8ea87e3b2d1a4d28b4 [ 717.290248] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c13a57f9a334464aba55c9efdf36ec2 [ 717.309885] env[61273]: DEBUG nova.network.neutron [-] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.310321] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 459986f2eb7b4ab6aa644578d98edaab in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 717.318983] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 459986f2eb7b4ab6aa644578d98edaab [ 717.513191] env[61273]: INFO nova.compute.manager [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] [instance: e2560c8e-61c6-4343-82cb-47dc5b1997fb] Took 1.02 seconds to deallocate network for instance. 
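The PortBindingFailed traceback above (and the near-identical ones later in this log) all end in _ensure_no_port_binding_failure at nova/network/neutron.py:294, after which the build is re-scheduled and the network allocation is cleaned up. A minimal standalone sketch of that check, assuming (this log does not show it) that Neutron reports a failed bind by setting the port's binding:vif_type to 'binding_failed':

    # Standalone sketch, not the Nova source: the check the traces above hit at
    # nova/network/neutron.py:294. Assumes a failed bind is reported by Neutron
    # as binding:vif_type == 'binding_failed'.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def ensure_no_port_binding_failure(port):
        # Raise as soon as the port dict returned by Neutron shows a failed binding.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Port dict shaped like Neutron's response after a failed bind attempt:
    try:
        ensure_no_port_binding_failure({
            'id': 'cd73e761-b035-4816-a3f2-ac6f23681fc7',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)  # the message logged above before the instance is re-scheduled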
[ 717.514870] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg cfb8bed73e0f447aad3d56add0af8c3d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 717.557648] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfb8bed73e0f447aad3d56add0af8c3d [ 717.582235] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 717.584080] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 7fa59c038f364d8a84b15e5bcaaacd96 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 717.632223] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fa59c038f364d8a84b15e5bcaaacd96 [ 717.778359] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 717.818262] env[61273]: INFO nova.compute.manager [-] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Took 1.05 seconds to deallocate network for instance. 
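The repeated "Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb" entries in this log carry the same inventory dict each time. As a rough reading of what that inventory allows the scheduler to place, assuming the usual Placement capacity formula capacity = (total - reserved) * allocation_ratio (which this log does not itself state):

    # Rough capacity derived from the inventory dict logged for provider
    # 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb. The (total - reserved) * allocation_ratio
    # formula is an assumption about Placement, not something this log states.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 141},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: schedulable {capacity}, max per allocation {inv['max_unit']}")
    # VCPU: schedulable 192, max per allocation 16
    # MEMORY_MB: schedulable 196078, max per allocation 65530
    # DISK_GB: schedulable 400, max per allocation 141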
[ 717.828092] env[61273]: DEBUG nova.compute.claims [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 717.828301] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.983254] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.984095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg a5f08744531e4840a3a9ba1847e60734 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 718.002886] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5f08744531e4840a3a9ba1847e60734 [ 718.021651] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg 0c4173f15c8d4784b4936115fd867083 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 718.067961] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c4173f15c8d4784b4936115fd867083 [ 718.107515] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.133177] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "05901bd4-2bad-405e-8e73-f6de4393a0f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.133391] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "05901bd4-2bad-405e-8e73-f6de4393a0f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.144212] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-28023c95-243b-4c66-a98b-d2f39aebf856 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.154394] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07640794-7a25-42e5-b4ab-29be5c45659f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.187286] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89ee49a-741b-4877-b757-1c53c2446e7e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.195485] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6424b5b-af5d-464b-84b5-bdd6c32e5dee {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.215196] env[61273]: DEBUG nova.compute.provider_tree [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.215698] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 00309043ea504d1b819fed5c2859aba8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 718.224610] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00309043ea504d1b819fed5c2859aba8 [ 718.492608] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Releasing lock "refresh_cache-3635532a-2af3-4ef5-a922-37fc763c9708" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.492608] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 718.492608] env[61273]: DEBUG nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 718.492757] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 718.533659] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.534277] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg c3bd8ee8f9884efca5b52254f7ac000a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 718.550403] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3bd8ee8f9884efca5b52254f7ac000a [ 718.557818] env[61273]: INFO nova.scheduler.client.report [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Deleted allocations for instance e2560c8e-61c6-4343-82cb-47dc5b1997fb [ 718.574023] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Expecting reply to msg a0d6264f1b594290a6e413ac37975b93 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 718.596501] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0d6264f1b594290a6e413ac37975b93 [ 718.615285] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "e8e826d4-2463-41a7-8c63-fd9f47eceea6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.615516] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "e8e826d4-2463-41a7-8c63-fd9f47eceea6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.718669] env[61273]: DEBUG nova.scheduler.client.report [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Inventory has not changed for provider 
4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 718.721121] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 9becb850c8d746f7b3b612adeb39bf34 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 718.732786] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9becb850c8d746f7b3b612adeb39bf34 [ 719.042178] env[61273]: DEBUG nova.network.neutron [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.042461] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 8aa7462c282b45a2bc956cf7073f942e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 719.052805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8aa7462c282b45a2bc956cf7073f942e [ 719.080137] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4070bcf9-37dc-4465-b011-fc55e0fedc30 tempest-ServerAddressesTestJSON-1376018912 tempest-ServerAddressesTestJSON-1376018912-project-member] Lock "e2560c8e-61c6-4343-82cb-47dc5b1997fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.342s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.080958] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 56bdc66c55004c10b07864f565a58018 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 719.100058] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56bdc66c55004c10b07864f565a58018 [ 719.228028] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.989s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.228028] env[61273]: ERROR nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 
e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4, please check neutron logs for more information. [ 719.228028] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Traceback (most recent call last): [ 719.228028] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 719.228028] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self.driver.spawn(context, instance, image_meta, [ 719.228028] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 719.228028] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self._vmops.spawn(context, instance, image_meta, injected_files, [ 719.228028] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 719.228028] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] vm_ref = self.build_virtual_machine(instance, [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] vif_infos = vmwarevif.get_vif_info(self._session, [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] for vif in network_info: [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] return self._sync_wrapper(fn, *args, **kwargs) [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self.wait() [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self[:] = self._gt.wait() [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] return self._exit_event.wait() [ 719.229993] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] current.throw(*self._exc) [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] result = function(*args, **kwargs) [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] return func(*args, **kwargs) [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] raise e [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] nwinfo = self.network_api.allocate_for_instance( [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] created_port_ids = self._update_ports_for_instance( [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 719.230631] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] with excutils.save_and_reraise_exception(): [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] self.force_reraise() [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] raise self.value [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] updated_port = self._update_port( [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] _ensure_no_port_binding_failure(port) [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] raise exception.PortBindingFailed(port_id=port['id']) [ 719.231151] env[61273]: ERROR nova.compute.manager 
[instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] nova.exception.PortBindingFailed: Binding failed for port e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4, please check neutron logs for more information. [ 719.231151] env[61273]: ERROR nova.compute.manager [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] [ 719.231618] env[61273]: DEBUG nova.compute.utils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Binding failed for port e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 719.231618] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.750s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.236671] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg c56cabc120fe4953ad4c45a167f3208b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 719.236671] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Build of instance 144c3c21-b18e-4997-a241-8ff21a3b4835 was re-scheduled: Binding failed for port e4f545c6-0fe7-4d41-a153-b9d2b62b4dc4, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 719.236671] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 719.236671] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Acquiring lock "refresh_cache-144c3c21-b18e-4997-a241-8ff21a3b4835" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.237398] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Acquired lock "refresh_cache-144c3c21-b18e-4997-a241-8ff21a3b4835" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.237398] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 719.237398] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg c5c7510712b846928589dd58e4fd0a0d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 719.250797] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5c7510712b846928589dd58e4fd0a0d [ 719.300089] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c56cabc120fe4953ad4c45a167f3208b [ 719.545573] env[61273]: INFO nova.compute.manager [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] [instance: 3635532a-2af3-4ef5-a922-37fc763c9708] Took 1.05 seconds to deallocate network for instance. [ 719.547302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg a91486088b9e4f2a8accf920af0e3b9a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 719.582819] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a91486088b9e4f2a8accf920af0e3b9a [ 719.589313] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 719.591205] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg c13b4eae7d094f858191d89d2b5391d5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 719.634771] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c13b4eae7d094f858191d89d2b5391d5 [ 719.767405] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 719.883486] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.883989] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg d834771c9d5a40b7afe82dedd2380ab0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 719.893361] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d834771c9d5a40b7afe82dedd2380ab0 [ 720.053576] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 1bcbf9d81e3845f389be7f6393b830eb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 720.098280] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bcbf9d81e3845f389be7f6393b830eb [ 720.115336] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.126092] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3228d344-cb29-42ab-8eab-511da0c8dd68 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.134580] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a907b8-51e1-4d76-b9ec-e6a3400ce9bb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.170655] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54267f9f-f2ee-462d-9bf0-cbd66ac55ab6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.180525] env[61273]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d7eee6-199f-413e-89d2-9bcfaed6f3b9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.196572] env[61273]: DEBUG nova.compute.provider_tree [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.197222] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg e9e42c8fc76349aeb9b8a3c76a0717b7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 720.207040] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9e42c8fc76349aeb9b8a3c76a0717b7 [ 720.386554] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Releasing lock "refresh_cache-144c3c21-b18e-4997-a241-8ff21a3b4835" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.386836] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 720.386986] env[61273]: DEBUG nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 720.387154] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 720.429803] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.436402] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg ac7d48e1319841a885a7c9569a1a8b39 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 720.437998] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac7d48e1319841a885a7c9569a1a8b39 [ 720.518461] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3f7543a4236f4a01916f253a8f58999c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 720.530525] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f7543a4236f4a01916f253a8f58999c [ 720.583563] env[61273]: INFO nova.scheduler.client.report [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Deleted allocations for instance 3635532a-2af3-4ef5-a922-37fc763c9708 [ 720.591559] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Expecting reply to msg 867b8d3b31f446a5bf6c41684757546f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 720.608364] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 867b8d3b31f446a5bf6c41684757546f [ 720.699739] env[61273]: DEBUG nova.scheduler.client.report [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 720.702303] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 14b839e9bd614ea18d632c6fb145b757 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 720.714858] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14b839e9bd614ea18d632c6fb145b757 [ 720.935323] env[61273]: DEBUG nova.network.neutron [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.935869] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg f54db40e46c54c26ab16f978f3ff2fb3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 720.949086] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f54db40e46c54c26ab16f978f3ff2fb3 [ 721.094836] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d26c7b29-1c09-4fcd-b378-a0fc6d64532f tempest-ServerGroupTestJSON-900251174 tempest-ServerGroupTestJSON-900251174-project-member] Lock "3635532a-2af3-4ef5-a922-37fc763c9708" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 124.606s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.095441] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg bf8001a06acc4c4ba9cd90e278011fee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 721.106588] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf8001a06acc4c4ba9cd90e278011fee [ 721.205665] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.979s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.206304] env[61273]: ERROR nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2d7689b9-6f2a-4729-b658-80c4feb01295, please check neutron logs for more information. 
[ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Traceback (most recent call last): [ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self.driver.spawn(context, instance, image_meta, [ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] vm_ref = self.build_virtual_machine(instance, [ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] vif_infos = vmwarevif.get_vif_info(self._session, [ 721.206304] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] for vif in network_info: [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] return self._sync_wrapper(fn, *args, **kwargs) [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self.wait() [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self[:] = self._gt.wait() [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] return self._exit_event.wait() [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] current.throw(*self._exc) [ 721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
721.206763] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] result = function(*args, **kwargs) [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] return func(*args, **kwargs) [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] raise e [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] nwinfo = self.network_api.allocate_for_instance( [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] created_port_ids = self._update_ports_for_instance( [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] with excutils.save_and_reraise_exception(): [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] self.force_reraise() [ 721.207236] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.207767] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] raise self.value [ 721.207767] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 721.207767] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] updated_port = self._update_port( [ 721.207767] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.207767] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] _ensure_no_port_binding_failure(port) [ 721.207767] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.207767] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] raise exception.PortBindingFailed(port_id=port['id']) [ 721.207767] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] nova.exception.PortBindingFailed: Binding failed for 
port 2d7689b9-6f2a-4729-b658-80c4feb01295, please check neutron logs for more information. [ 721.207767] env[61273]: ERROR nova.compute.manager [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] [ 721.207767] env[61273]: DEBUG nova.compute.utils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Binding failed for port 2d7689b9-6f2a-4729-b658-80c4feb01295, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 721.208248] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.236s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.210195] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 5c59cd12c54f4c699113d36fe1a6f31e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 721.211340] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Build of instance bf40cc8c-4729-49c5-8c9d-e3ee09606aa5 was re-scheduled: Binding failed for port 2d7689b9-6f2a-4729-b658-80c4feb01295, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 721.211781] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 721.212032] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "refresh_cache-bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.212165] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquired lock "refresh_cache-bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.212374] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 721.212703] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 604ddb34381e46b6ba7783f220be609e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 721.219111] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 604ddb34381e46b6ba7783f220be609e [ 721.256208] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c59cd12c54f4c699113d36fe1a6f31e [ 721.438540] env[61273]: INFO nova.compute.manager [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] [instance: 144c3c21-b18e-4997-a241-8ff21a3b4835] Took 1.05 seconds to deallocate network for instance. [ 721.440430] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 3bad4d1f913d4bd9816fab916d2f075e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 721.477454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bad4d1f913d4bd9816fab916d2f075e [ 721.598577] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 721.600394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 7fad646c320349a087d098afd712d203 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 721.616322] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Acquiring lock "f6faf064-364d-4d24-9822-220bce47b3f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.616552] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Lock "f6faf064-364d-4d24-9822-220bce47b3f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.656240] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fad646c320349a087d098afd712d203 [ 721.742631] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 721.919558] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.920199] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 3444b5238ea34dbba45b1ca269ec4252 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 721.929464] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3444b5238ea34dbba45b1ca269ec4252 [ 721.945553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg 4028ac99af5845dabcb860cd2520a3d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 721.988983] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4028ac99af5845dabcb860cd2520a3d1 [ 722.066951] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e0155a-9024-45d2-9e54-12ecffad02ec {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.074240] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27dd96f1-12b0-465b-ab08-42a25babf400 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.107508] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b46421-d444-4732-bcaf-f91bb20511b5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.118412] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab55855e-38e3-4424-b30c-c9e384086ecd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.123995] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.134438] env[61273]: DEBUG nova.compute.provider_tree [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.135077] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg aa9a0646e1f840ef9b1769bab8f6f9ce in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 722.142173] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa9a0646e1f840ef9b1769bab8f6f9ce [ 722.423236] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Releasing lock "refresh_cache-bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.423466] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 722.423638] env[61273]: DEBUG nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 722.423801] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 722.448169] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 722.448455] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg f7160cb542794d63babdf5f4d1681ddd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 722.474058] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7160cb542794d63babdf5f4d1681ddd [ 722.475279] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Acquiring lock "d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.475438] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Lock "d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.476403] env[61273]: INFO nova.scheduler.client.report [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Deleted allocations for instance 144c3c21-b18e-4997-a241-8ff21a3b4835 [ 722.484209] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Expecting reply to msg e2336e12e39b41c38b287adf1155d6d4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 722.500694] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2336e12e39b41c38b287adf1155d6d4 [ 722.637238] env[61273]: DEBUG nova.scheduler.client.report [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 722.640150] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg e49cbdb62c6d4d5ba6c8e599a65f3d32 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 722.651331] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e49cbdb62c6d4d5ba6c8e599a65f3d32 [ 722.954542] env[61273]: DEBUG nova.network.neutron [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 
tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.954542] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 64e59c8db21b4b32b69b3e5ed1951453 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 722.962258] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64e59c8db21b4b32b69b3e5ed1951453 [ 722.986741] env[61273]: DEBUG oslo_concurrency.lockutils [None req-73d42730-9e06-4816-85bf-9175ea4cca99 tempest-ServerActionsV293TestJSON-1761589095 tempest-ServerActionsV293TestJSON-1761589095-project-member] Lock "144c3c21-b18e-4997-a241-8ff21a3b4835" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.138s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.986741] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg a2d0c85f708f4529a87197e8b4c4e7b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 723.003550] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2d0c85f708f4529a87197e8b4c4e7b0 [ 723.144263] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.936s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.144892] env[61273]: ERROR nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 469e6b69-bd24-49e1-a6b3-a58c8e848eba, please check neutron logs for more information. 
[ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Traceback (most recent call last): [ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self.driver.spawn(context, instance, image_meta, [ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self._vmops.spawn(context, instance, image_meta, injected_files, [ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] vm_ref = self.build_virtual_machine(instance, [ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] vif_infos = vmwarevif.get_vif_info(self._session, [ 723.144892] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] for vif in network_info: [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] return self._sync_wrapper(fn, *args, **kwargs) [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self.wait() [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self[:] = self._gt.wait() [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] return self._exit_event.wait() [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] current.throw(*self._exc) [ 723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
723.145259] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] result = function(*args, **kwargs) [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] return func(*args, **kwargs) [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] raise e [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] nwinfo = self.network_api.allocate_for_instance( [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] created_port_ids = self._update_ports_for_instance( [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] with excutils.save_and_reraise_exception(): [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] self.force_reraise() [ 723.145658] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.146053] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] raise self.value [ 723.146053] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 723.146053] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] updated_port = self._update_port( [ 723.146053] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.146053] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] _ensure_no_port_binding_failure(port) [ 723.146053] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 723.146053] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] raise exception.PortBindingFailed(port_id=port['id']) [ 723.146053] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] nova.exception.PortBindingFailed: Binding failed for 
port 469e6b69-bd24-49e1-a6b3-a58c8e848eba, please check neutron logs for more information. [ 723.146053] env[61273]: ERROR nova.compute.manager [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] [ 723.146053] env[61273]: DEBUG nova.compute.utils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Binding failed for port 469e6b69-bd24-49e1-a6b3-a58c8e848eba, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 723.146825] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.194s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.148639] env[61273]: INFO nova.compute.claims [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.150298] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 7ee3349a335e4c968ee3d24d8a785276 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 723.151848] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Build of instance 8d63e0a8-85a1-400b-a6f0-8e87c7945655 was re-scheduled: Binding failed for port 469e6b69-bd24-49e1-a6b3-a58c8e848eba, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 723.152362] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 723.152672] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Acquiring lock "refresh_cache-8d63e0a8-85a1-400b-a6f0-8e87c7945655" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.152925] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Acquired lock "refresh_cache-8d63e0a8-85a1-400b-a6f0-8e87c7945655" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.153154] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 723.153571] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 0e59bfc5d54a4a18b7da79114c73f36e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 723.159546] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e59bfc5d54a4a18b7da79114c73f36e [ 723.188362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ee3349a335e4c968ee3d24d8a785276 [ 723.457002] env[61273]: INFO nova.compute.manager [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: bf40cc8c-4729-49c5-8c9d-e3ee09606aa5] Took 1.03 seconds to deallocate network for instance. [ 723.459783] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 9d48bbefbc8c4f58acebb0eee7bf6020 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 723.487968] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 723.489711] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg ae5748cb89b046b08a7665cd630e5c5c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 723.499390] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d48bbefbc8c4f58acebb0eee7bf6020 [ 723.533736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae5748cb89b046b08a7665cd630e5c5c [ 723.657651] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 998b20eb7806446ca934658f0787e472 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 723.668609] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 998b20eb7806446ca934658f0787e472 [ 723.697340] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 723.816917] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.816917] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 2504ffaf982649b5a36a85f55057f1a1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 723.829198] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2504ffaf982649b5a36a85f55057f1a1 [ 723.968594] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg eb622653ec594dfe961b1bfdcd5a2f84 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 724.005621] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb622653ec594dfe961b1bfdcd5a2f84 [ 724.022743] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.321572] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Releasing lock "refresh_cache-8d63e0a8-85a1-400b-a6f0-8e87c7945655" {{(pid=61273) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.321866] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 724.322046] env[61273]: DEBUG nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 724.322213] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 724.352878] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.353472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 1fd0e7b16fa04c90886bdf8ada42f05d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 724.360602] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fd0e7b16fa04c90886bdf8ada42f05d [ 724.476456] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4bbd09-28f8-43ca-9064-73b3a6e9775e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.486581] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdd0dbf-4cda-438b-a654-04d2c22dde58 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.491782] env[61273]: INFO nova.scheduler.client.report [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Deleted allocations for instance bf40cc8c-4729-49c5-8c9d-e3ee09606aa5 [ 724.497934] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 0ef55120e6d9448e94c3a75e59a5bda1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 724.536388] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ef55120e6d9448e94c3a75e59a5bda1 [ 724.537252] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7b6fe2-8940-44fb-b74f-7e69c9dae737 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.545898] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897664ea-2230-43cc-aae1-7caa0fea5b49 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.559419] env[61273]: DEBUG nova.compute.provider_tree [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.559922] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 512c51fd1f61413ebb85e8187400a03c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 724.570560] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 512c51fd1f61413ebb85e8187400a03c [ 724.857909] env[61273]: DEBUG nova.network.neutron [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.858427] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 289e39e52170482796d5283a150dfd5a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 724.867428] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 289e39e52170482796d5283a150dfd5a [ 724.999735] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98d95f9f-6622-4817-b1ed-60eccd2e5db5 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "bf40cc8c-4729-49c5-8c9d-e3ee09606aa5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.767s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.000325] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg d7b3ca0d47e047a7b2d6d2c3a1529bdb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 725.010004] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7b3ca0d47e047a7b2d6d2c3a1529bdb [ 725.062697] env[61273]: DEBUG nova.scheduler.client.report [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 725.064957] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 9ed2e1b0b64b4b8d956f2ff36eba22b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 725.080441] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ed2e1b0b64b4b8d956f2ff36eba22b4 [ 725.360928] env[61273]: INFO nova.compute.manager [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] [instance: 8d63e0a8-85a1-400b-a6f0-8e87c7945655] Took 1.04 seconds to deallocate network for instance. [ 725.362854] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg eb37d1afd1d74a24bc37fe742a1ad4fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 725.405055] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb37d1afd1d74a24bc37fe742a1ad4fc [ 725.502173] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 725.504136] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 31c76838b1594ab69792e916838835e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 725.540398] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31c76838b1594ab69792e916838835e6 [ 725.566969] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.420s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.567547] env[61273]: DEBUG nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 725.569462] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg c20b6be0646f48eb9c0ba25b0d54620d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 725.570507] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.735s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.572041] env[61273]: INFO nova.compute.claims [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.573522] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 1bbb6aa5998c4496b3e9ff6a9b0e710a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 725.617059] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c20b6be0646f48eb9c0ba25b0d54620d [ 725.625180] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bbb6aa5998c4496b3e9ff6a9b0e710a [ 725.867609] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 30e46dd0549c4837938990c2c1e4e053 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 725.903389] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30e46dd0549c4837938990c2c1e4e053 [ 726.022717] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.078981] env[61273]: DEBUG nova.compute.utils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 726.079690] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg b065939c27a34fabbad69bd00cb11383 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 726.082228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 0954adde97124a778355bdb8e75f5d91 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 726.083020] env[61273]: DEBUG 
nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 726.083188] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 726.089958] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0954adde97124a778355bdb8e75f5d91 [ 726.092843] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b065939c27a34fabbad69bd00cb11383 [ 726.131059] env[61273]: DEBUG nova.policy [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e14d098d2604a1b9543150852012a1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a137cec9c540259688f75fb14dce35', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 726.308195] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "ebc03a5c-984f-4d58-abb0-da555adcfbac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.308426] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "ebc03a5c-984f-4d58-abb0-da555adcfbac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.391628] env[61273]: INFO nova.scheduler.client.report [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Deleted allocations for instance 8d63e0a8-85a1-400b-a6f0-8e87c7945655 [ 726.397312] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Expecting reply to msg 854d20cd0f964acfa85bd5bb5deee2eb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 726.408664] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 854d20cd0f964acfa85bd5bb5deee2eb [ 726.489647] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 
tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Successfully created port: 70e066e6-557a-491a-9f73-a77f76256833 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.588990] env[61273]: DEBUG nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 726.590618] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 541d4f572b484754aee9161e6f2381ca in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 726.627573] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 541d4f572b484754aee9161e6f2381ca [ 726.887408] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abd618f-b108-4dc5-ae20-f29754aecd12 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.895341] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2dd451-a179-4344-9940-9c31e87264e6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.898635] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f6fe3ed7-48ec-4806-b05c-9ce19e0fdfe6 tempest-ServersTestBootFromVolume-1266653635 tempest-ServersTestBootFromVolume-1266653635-project-member] Lock "8d63e0a8-85a1-400b-a6f0-8e87c7945655" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.830s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.899238] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 0057bf0221764cd4b300b46256a12098 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 726.927824] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0057bf0221764cd4b300b46256a12098 [ 726.928795] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7864fb46-103a-4ffe-94ae-89ab67ed33b7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.936636] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51062ccb-157d-4cfe-a9c8-759c890d73a3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.951522] env[61273]: DEBUG nova.compute.provider_tree [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.952121] 
env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 7791528899694544bcda9ae38c218055 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 726.959564] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7791528899694544bcda9ae38c218055 [ 727.094989] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 492c81777fe14288b94ae54d50e90074 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 727.137261] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 492c81777fe14288b94ae54d50e90074 [ 727.373292] env[61273]: DEBUG nova.compute.manager [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Received event network-changed-70e066e6-557a-491a-9f73-a77f76256833 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 727.373481] env[61273]: DEBUG nova.compute.manager [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Refreshing instance network info cache due to event network-changed-70e066e6-557a-491a-9f73-a77f76256833. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 727.373702] env[61273]: DEBUG oslo_concurrency.lockutils [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] Acquiring lock "refresh_cache-b6a158f8-6e2a-4967-ad05-761804ec6590" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.373839] env[61273]: DEBUG oslo_concurrency.lockutils [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] Acquired lock "refresh_cache-b6a158f8-6e2a-4967-ad05-761804ec6590" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.374020] env[61273]: DEBUG nova.network.neutron [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Refreshing network info cache for port 70e066e6-557a-491a-9f73-a77f76256833 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 727.374437] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] Expecting reply to msg 45f52322cedc48f0b1b8779b67be8ac3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 727.381828] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45f52322cedc48f0b1b8779b67be8ac3 [ 727.401557] env[61273]: DEBUG nova.compute.manager [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 727.403518] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 1f1f5d404c374b57811dde38002c1ea7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 727.440181] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f1f5d404c374b57811dde38002c1ea7 [ 727.460892] env[61273]: DEBUG nova.scheduler.client.report [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 727.464404] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 8abb7f0753aa4bbb958c4abdb9174191 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 727.476579] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8abb7f0753aa4bbb958c4abdb9174191 [ 727.580251] env[61273]: ERROR nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 70e066e6-557a-491a-9f73-a77f76256833, please check neutron logs for more information. 
[ 727.580251] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 727.580251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.580251] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 727.580251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 727.580251] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 727.580251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 727.580251] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 727.580251] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.580251] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 727.580251] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.580251] env[61273]: ERROR nova.compute.manager raise self.value [ 727.580251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 727.580251] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 727.580251] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.580251] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 727.580836] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.580836] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 727.580836] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 70e066e6-557a-491a-9f73-a77f76256833, please check neutron logs for more information. 
[ 727.580836] env[61273]: ERROR nova.compute.manager [ 727.580836] env[61273]: Traceback (most recent call last): [ 727.580836] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 727.580836] env[61273]: listener.cb(fileno) [ 727.580836] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.580836] env[61273]: result = function(*args, **kwargs) [ 727.580836] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 727.580836] env[61273]: return func(*args, **kwargs) [ 727.580836] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.580836] env[61273]: raise e [ 727.580836] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.580836] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 727.580836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 727.580836] env[61273]: created_port_ids = self._update_ports_for_instance( [ 727.580836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 727.580836] env[61273]: with excutils.save_and_reraise_exception(): [ 727.580836] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.580836] env[61273]: self.force_reraise() [ 727.580836] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.580836] env[61273]: raise self.value [ 727.580836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 727.580836] env[61273]: updated_port = self._update_port( [ 727.580836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.580836] env[61273]: _ensure_no_port_binding_failure(port) [ 727.580836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.580836] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 727.581803] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 70e066e6-557a-491a-9f73-a77f76256833, please check neutron logs for more information. [ 727.581803] env[61273]: Removing descriptor: 19 [ 727.597781] env[61273]: DEBUG nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 727.622190] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 727.622530] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 727.622610] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.622818] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 727.622962] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.623220] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 727.623434] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 727.623658] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 727.623854] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 727.624599] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 727.624823] env[61273]: DEBUG nova.virt.hardware [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 727.625678] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3d1a00-581c-451a-a5bd-f6b35b8b8d1c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.634229] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f665251-9f3c-457b-a594-1a17ce10c351 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.648251] env[61273]: ERROR nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 70e066e6-557a-491a-9f73-a77f76256833, please check neutron logs for more information. 
[ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Traceback (most recent call last): [ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] yield resources [ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self.driver.spawn(context, instance, image_meta, [ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] vm_ref = self.build_virtual_machine(instance, [ 727.648251] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] vif_infos = vmwarevif.get_vif_info(self._session, [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] for vif in network_info: [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] return self._sync_wrapper(fn, *args, **kwargs) [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self.wait() [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self[:] = self._gt.wait() [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] return self._exit_event.wait() [ 727.648625] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 727.648625] env[61273]: ERROR 
nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] current.throw(*self._exc) [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] result = function(*args, **kwargs) [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] return func(*args, **kwargs) [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] raise e [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] nwinfo = self.network_api.allocate_for_instance( [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] created_port_ids = self._update_ports_for_instance( [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] with excutils.save_and_reraise_exception(): [ 727.649038] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self.force_reraise() [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] raise self.value [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] updated_port = self._update_port( [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] _ensure_no_port_binding_failure(port) [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] raise exception.PortBindingFailed(port_id=port['id']) [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] nova.exception.PortBindingFailed: Binding failed for port 70e066e6-557a-491a-9f73-a77f76256833, please check neutron logs for more information. [ 727.649448] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] [ 727.649448] env[61273]: INFO nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Terminating instance [ 727.650472] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Acquiring lock "refresh_cache-b6a158f8-6e2a-4967-ad05-761804ec6590" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.923704] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.928412] env[61273]: DEBUG nova.network.neutron [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 727.967241] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.397s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.967757] env[61273]: DEBUG nova.compute.manager [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 727.969453] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg e6e17c5e9d4b4f6b819a50c1eec9dfe9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 727.970521] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.185s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.971841] env[61273]: INFO nova.compute.claims [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.973405] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg ea44289c4aaa4b1b9e13aa9a75927e8f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 728.025200] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6e17c5e9d4b4f6b819a50c1eec9dfe9 [ 728.026171] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea44289c4aaa4b1b9e13aa9a75927e8f [ 728.038636] env[61273]: DEBUG nova.network.neutron [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.038930] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] Expecting reply to msg 304bbd71e0d64662a671abe427c5d2d7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 728.047229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 304bbd71e0d64662a671abe427c5d2d7 [ 728.476852] env[61273]: DEBUG nova.compute.utils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 728.477523] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg dd0eee235c724bd189c8825ae0250e76 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 728.484136] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg aedb3e917bec4fa290f371a6aa56bfe0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 728.484136] env[61273]: DEBUG nova.compute.manager [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 
ca8a38c7-a81c-407a-9558-3d15e492d9fa] Not allocating networking since 'none' was specified. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 728.490795] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aedb3e917bec4fa290f371a6aa56bfe0 [ 728.494971] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd0eee235c724bd189c8825ae0250e76 [ 728.541239] env[61273]: DEBUG oslo_concurrency.lockutils [req-ab06af22-25c5-4cda-81d2-49f2e35add2e req-9036ffb3-8a0a-4bdf-90cc-2d1f64598f17 service nova] Releasing lock "refresh_cache-b6a158f8-6e2a-4967-ad05-761804ec6590" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.541654] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Acquired lock "refresh_cache-b6a158f8-6e2a-4967-ad05-761804ec6590" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.541866] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 728.542283] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg f499b8b2239245429eb8a86a9e7dcc8a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 728.548467] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f499b8b2239245429eb8a86a9e7dcc8a [ 728.982315] env[61273]: DEBUG nova.compute.manager [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 728.984354] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 0b41fb9b334e4e3eb936d087354b32c0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 729.035262] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b41fb9b334e4e3eb936d087354b32c0 [ 729.075257] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.215350] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.215555] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 135c9564f21b45659ea13d1ba3412abf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 729.223091] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 135c9564f21b45659ea13d1ba3412abf [ 729.346241] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520d3922-c06e-4751-82b1-5989bbe08cf3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.354359] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1bb5a0-95c6-42b0-a9b1-c15e6fe17a99 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.384970] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9a8677-38b1-44e2-84c0-aa1810cd9d36 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.392385] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df74979d-5524-4654-8ed1-6562cdd23e56 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.405645] env[61273]: DEBUG nova.compute.provider_tree [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.406175] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 5593ebccc71d4150924d67ddd45c1a2f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 729.408620] env[61273]: DEBUG nova.compute.manager [req-18c7da3e-a9d6-437f-9d2c-31654db37653 req-3546aa10-3223-40b6-a94f-731f0841b6b0 service nova] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Received event network-vif-deleted-70e066e6-557a-491a-9f73-a77f76256833 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 729.413329] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5593ebccc71d4150924d67ddd45c1a2f [ 729.491072] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg b7ed6917edca4330b6cbacc110525973 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 729.552725] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7ed6917edca4330b6cbacc110525973 [ 729.718295] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Releasing lock "refresh_cache-b6a158f8-6e2a-4967-ad05-761804ec6590" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.718743] env[61273]: DEBUG nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 729.718957] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 729.719261] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0facc5dd-ce44-4124-a1b6-9b0b597009d0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.729998] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c943d678-0a1e-4ef4-8902-3e9606724161 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.761075] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b6a158f8-6e2a-4967-ad05-761804ec6590 could not be found. [ 729.761147] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 729.761358] env[61273]: INFO nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Took 0.04 seconds to destroy the instance on the hypervisor. [ 729.761813] env[61273]: DEBUG oslo.service.loopingcall [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.762269] env[61273]: DEBUG nova.compute.manager [-] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 729.762399] env[61273]: DEBUG nova.network.neutron [-] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 729.787894] env[61273]: DEBUG nova.network.neutron [-] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.788500] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7fbce5386b504699834af5e89aee89c8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 729.801405] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fbce5386b504699834af5e89aee89c8 [ 729.910892] env[61273]: DEBUG nova.scheduler.client.report [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 729.913487] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg fd6d9f1d5ee1404ead113bd2c12645c1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 729.925196] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd6d9f1d5ee1404ead113bd2c12645c1 [ 729.993796] env[61273]: DEBUG nova.compute.manager [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 730.019229] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 730.019477] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 730.019831] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.020060] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 730.020218] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.020367] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 730.020576] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 730.020737] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 730.020905] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 
tempest-ServerShowV247Test-937081600-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 730.021065] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 730.021240] env[61273]: DEBUG nova.virt.hardware [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 730.022218] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b45521-e029-4454-89f1-a9d3f42810d0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.030475] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4498b18d-bc03-4c9b-85cc-6b358e46659c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.046644] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.052562] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Creating folder: Project (2160ae8026c4462bb19f174f578e8768). Parent ref: group-v103328. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.052962] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08067380-5114-4347-aafd-67f7ad0c242d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.063131] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Created folder: Project (2160ae8026c4462bb19f174f578e8768) in parent group-v103328. [ 730.063326] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Creating folder: Instances. Parent ref: group-v103348. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.063447] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecb9dff5-1603-4556-aed4-07b17bb17f57 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.071706] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Created folder: Instances in parent group-v103348. 
[ 730.071937] env[61273]: DEBUG oslo.service.loopingcall [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.072130] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 730.072318] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b73b48d-ade4-4376-83e6-7ccdc201d8fa {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.087671] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.087671] env[61273]: value = "task-375298" [ 730.087671] env[61273]: _type = "Task" [ 730.087671] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.094618] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375298, 'name': CreateVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.290895] env[61273]: DEBUG nova.network.neutron [-] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.291436] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8d2962e632a94434b2c076e3d2c273f6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 730.300668] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d2962e632a94434b2c076e3d2c273f6 [ 730.416407] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.416973] env[61273]: DEBUG nova.compute.manager [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 730.418636] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 6846e6f6089b4252b5fa379823d1157c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 730.428021] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.592s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.428021] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 9050133bb43f4c6f809a4b72a9149457 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 730.454731] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6846e6f6089b4252b5fa379823d1157c [ 730.469836] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9050133bb43f4c6f809a4b72a9149457 [ 730.597797] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375298, 'name': CreateVM_Task, 'duration_secs': 0.241663} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.597969] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 730.598385] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.598544] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.598860] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 730.599104] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b2c504a-f5b2-420d-9420-02774d1e96ff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.607228] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: 
(returnval){ [ 730.607228] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]529952e4-a255-f774-27eb-922b53814dd1" [ 730.607228] env[61273]: _type = "Task" [ 730.607228] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.615353] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]529952e4-a255-f774-27eb-922b53814dd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.793796] env[61273]: INFO nova.compute.manager [-] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Took 1.03 seconds to deallocate network for instance. [ 730.796284] env[61273]: DEBUG nova.compute.claims [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 730.796466] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.922463] env[61273]: DEBUG nova.compute.utils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 730.923554] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg d62f97d3d419426aae0aa68d50d251a6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 730.925023] env[61273]: DEBUG nova.compute.manager [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Not allocating networking since 'none' was specified. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 730.936832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d62f97d3d419426aae0aa68d50d251a6 [ 731.120862] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]529952e4-a255-f774-27eb-922b53814dd1, 'name': SearchDatastore_Task, 'duration_secs': 0.009748} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.121166] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.121400] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.121631] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.121764] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.122527] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 731.122801] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b79d913a-2c47-439b-bf73-d79afe372402 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.132888] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 731.132888] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 731.133595] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b69acef1-0b4e-486b-809d-1a5c2379a173 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.138499] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 731.138499] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]5240d3e3-c57c-2156-ebbb-6dd9330d9aa5" [ 731.138499] env[61273]: _type = "Task" [ 731.138499] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.152823] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5240d3e3-c57c-2156-ebbb-6dd9330d9aa5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.166554] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.167146] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.167774] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f6f5bc1c047e4c64bed979a94b855c9d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 731.208698] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6f5bc1c047e4c64bed979a94b855c9d [ 731.266912] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97abc4f3-6adc-481c-93eb-093bd9dc8c28 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.273409] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b7df2b-0eb8-410a-87dc-673ed79dd9a1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.304579] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d35772-c42e-418b-b2f2-98200c61b264 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.312028] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f435d7-cee9-4563-9cd3-32b83289bb2f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.325222] env[61273]: DEBUG nova.compute.provider_tree [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 
tempest-ImagesOneServerTestJSON-1243653497-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.325865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 1c67732c4bce42c29971425faed3e819 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 731.334211] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c67732c4bce42c29971425faed3e819 [ 731.425720] env[61273]: DEBUG nova.compute.manager [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 731.427543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 827a87b7df8345fe869e59e5609313a1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 731.464630] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 827a87b7df8345fe869e59e5609313a1 [ 731.648767] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5240d3e3-c57c-2156-ebbb-6dd9330d9aa5, 'name': SearchDatastore_Task, 'duration_secs': 0.008224} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.649523] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-830c23b8-0f2d-4917-b50a-7b835f2a3482 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.654462] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 731.654462] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]5231c177-a392-81f6-e25a-4c378adef865" [ 731.654462] env[61273]: _type = "Task" [ 731.654462] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.661585] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5231c177-a392-81f6-e25a-4c378adef865, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.673776] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.673776] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Starting heal instance info cache {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 731.673776] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Rebuilding the list of instances to heal {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 731.673776] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg b0568477fd204768b3a133be3b7c82f7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 731.686043] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0568477fd204768b3a133be3b7c82f7 [ 731.830566] env[61273]: DEBUG nova.scheduler.client.report [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 731.831286] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 7508f6fb4e584d98961111028ebae88d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 731.842318] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7508f6fb4e584d98961111028ebae88d [ 731.932098] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg a7bdc96dd19c4cff95f8ae82153a4fb4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 731.961454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7bdc96dd19c4cff95f8ae82153a4fb4 [ 732.165961] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5231c177-a392-81f6-e25a-4c378adef865, 'name': SearchDatastore_Task, 'duration_secs': 0.009224} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.166239] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.166476] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] ca8a38c7-a81c-407a-9558-3d15e492d9fa/ca8a38c7-a81c-407a-9558-3d15e492d9fa.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 732.166730] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7396b2f3-b3b3-4302-acc0-619b11344d98 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.173671] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 732.173671] env[61273]: value = "task-375299" [ 732.173671] env[61273]: _type = "Task" [ 732.173671] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.178849] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 732.178998] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 732.179127] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 732.179273] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Didn't find any instances for network info cache update. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 732.182875] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.183091] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375299, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.183273] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.183443] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.183570] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.183708] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.183850] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.183979] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61273) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 732.184139] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager.update_available_resource {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.184472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg a2fb5dc8fb2e44538a51e6a409b96b3b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 732.194642] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2fb5dc8fb2e44538a51e6a409b96b3b [ 732.333983] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.914s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.334744] env[61273]: ERROR nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f1a08c1d-bfd9-47a7-9221-49193552bcba, please check neutron logs for more information. 
[ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] Traceback (most recent call last): [ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self.driver.spawn(context, instance, image_meta, [ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self._vmops.spawn(context, instance, image_meta, injected_files, [ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] vm_ref = self.build_virtual_machine(instance, [ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] vif_infos = vmwarevif.get_vif_info(self._session, [ 732.334744] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] for vif in network_info: [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] return self._sync_wrapper(fn, *args, **kwargs) [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self.wait() [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self[:] = self._gt.wait() [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] return self._exit_event.wait() [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] result = hub.switch() [ 732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
732.335660] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] return self.greenlet.switch() [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] result = function(*args, **kwargs) [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] return func(*args, **kwargs) [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] raise e [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] nwinfo = self.network_api.allocate_for_instance( [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] created_port_ids = self._update_ports_for_instance( [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] with excutils.save_and_reraise_exception(): [ 732.336504] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] self.force_reraise() [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] raise self.value [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] updated_port = self._update_port( [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] _ensure_no_port_binding_failure(port) [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] raise exception.PortBindingFailed(port_id=port['id']) [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] nova.exception.PortBindingFailed: Binding failed for port f1a08c1d-bfd9-47a7-9221-49193552bcba, please check neutron logs for more information. [ 732.337426] env[61273]: ERROR nova.compute.manager [instance: 23774aa5-1608-495f-8015-29e25f856c69] [ 732.338257] env[61273]: DEBUG nova.compute.utils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Binding failed for port f1a08c1d-bfd9-47a7-9221-49193552bcba, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 732.338257] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Build of instance 23774aa5-1608-495f-8015-29e25f856c69 was re-scheduled: Binding failed for port f1a08c1d-bfd9-47a7-9221-49193552bcba, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 732.338257] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 732.338257] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Acquiring lock "refresh_cache-23774aa5-1608-495f-8015-29e25f856c69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.338744] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Acquired lock "refresh_cache-23774aa5-1608-495f-8015-29e25f856c69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.338744] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 732.338993] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg f3b9bf062bdc4505b27e076e69c5bae6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 732.340454] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.233s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.342749] env[61273]: INFO nova.compute.claims [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.345717] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg aa0a30018c4240d5b60fa8e044d59aeb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 732.349541] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3b9bf062bdc4505b27e076e69c5bae6 [ 732.381215] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa0a30018c4240d5b60fa8e044d59aeb [ 732.435604] env[61273]: DEBUG nova.compute.manager [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 732.460920] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 732.461182] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 732.461357] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.461509] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 732.461750] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 
tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.461882] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 732.462089] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 732.462249] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 732.462411] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 732.462571] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 732.462790] env[61273]: DEBUG nova.virt.hardware [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 732.463714] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6704f453-7b31-41f1-8244-23b03da13527 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.471976] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1854526d-5421-4fa9-a827-172d82924912 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.485386] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 732.490931] env[61273]: DEBUG oslo.service.loopingcall [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.491202] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 732.491417] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b68a34e2-50c9-4e3b-a44d-2710e2c3ca57 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.513034] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 732.513034] env[61273]: value = "task-375300" [ 732.513034] env[61273]: _type = "Task" [ 732.513034] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.522305] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375300, 'name': CreateVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.683303] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375299, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473692} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.683571] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] ca8a38c7-a81c-407a-9558-3d15e492d9fa/ca8a38c7-a81c-407a-9558-3d15e492d9fa.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 732.683773] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 732.684057] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c2ff25d-ecdd-4a8c-886a-e1f99dcf425b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.686485] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.691085] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 732.691085] env[61273]: value = "task-375301" [ 732.691085] env[61273]: _type = "Task" [ 732.691085] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.698491] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375301, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.849094] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 174a76fb447c4fa5a7f1d967ddf8383b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 732.857558] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 174a76fb447c4fa5a7f1d967ddf8383b [ 732.858623] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 732.934606] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.935133] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg b39822c314474640b7761a0142ecfab8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 732.948105] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b39822c314474640b7761a0142ecfab8 [ 733.024064] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375300, 'name': CreateVM_Task, 'duration_secs': 0.272413} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.024248] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 733.024673] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.024899] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.025173] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 733.025379] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0d42b88-42a8-4f3f-9435-20258f3852bc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.029605] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 733.029605] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]520be8a5-5c44-5ed3-c119-c1f61884c148" [ 733.029605] env[61273]: _type = "Task" [ 733.029605] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.036764] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]520be8a5-5c44-5ed3-c119-c1f61884c148, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.200958] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375301, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061687} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.201273] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.202100] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1bfd31-836e-4629-8c57-0c81df615779 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.227565] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] ca8a38c7-a81c-407a-9558-3d15e492d9fa/ca8a38c7-a81c-407a-9558-3d15e492d9fa.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.227885] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dffa8491-bcff-46f2-b92c-214ceef35c70 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.253384] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 733.253384] env[61273]: value = "task-375302" [ 733.253384] env[61273]: _type = "Task" [ 733.253384] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.262720] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375302, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.439105] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Releasing lock "refresh_cache-23774aa5-1608-495f-8015-29e25f856c69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.439337] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 733.439511] env[61273]: DEBUG nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 733.439713] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 733.459890] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 733.460578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 2052a0cdaea44ba99b1f46bd3e7b7bcb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 733.468559] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2052a0cdaea44ba99b1f46bd3e7b7bcb [ 733.538973] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]520be8a5-5c44-5ed3-c119-c1f61884c148, 'name': SearchDatastore_Task, 'duration_secs': 0.009515} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.541300] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.541544] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 733.541780] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.541927] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.542097] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 733.542516] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adbba46c-5150-406c-a2ee-f245557eb188 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.549873] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 733.550050] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 733.550883] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beb659c3-51d1-42b9-85e1-f67db725625d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.557540] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 733.557540] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]523a6cfa-1781-a895-3e16-58a2d2473cea" [ 733.557540] env[61273]: _type = "Task" [ 733.557540] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.564770] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]523a6cfa-1781-a895-3e16-58a2d2473cea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.634205] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc49156-0a0c-42c5-b088-c5a4f97af530 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.639967] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12cd5ea-44ef-4c1f-9442-642437cc31a2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.672255] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a3ae26-2d46-4751-aaf6-838ca7c8afaf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.679248] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6291f2b-324c-430d-81c1-9e0dd9e1f094 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.694033] env[61273]: DEBUG nova.compute.provider_tree [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.694988] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 23ecabec899f462693af171fff868903 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 733.702680] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23ecabec899f462693af171fff868903 [ 733.762997] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375302, 'name': ReconfigVM_Task, 'duration_secs': 0.262109} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.763548] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Reconfigured VM instance instance-0000002b to attach disk [datastore2] ca8a38c7-a81c-407a-9558-3d15e492d9fa/ca8a38c7-a81c-407a-9558-3d15e492d9fa.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 733.764348] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6db45e6e-7278-4a42-9491-2f70f4ea3e88 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.770384] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 733.770384] env[61273]: value = "task-375303" [ 733.770384] env[61273]: _type = "Task" [ 733.770384] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.778599] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375303, 'name': Rename_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.963766] env[61273]: DEBUG nova.network.neutron [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.963882] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 2f9beda9332a4597ad70fda641eb905b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 733.972380] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f9beda9332a4597ad70fda641eb905b [ 734.066887] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]523a6cfa-1781-a895-3e16-58a2d2473cea, 'name': SearchDatastore_Task, 'duration_secs': 0.007939} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.067611] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-135f39e4-a650-4516-a0cd-ea1cde1b0b94 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.072455] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 734.072455] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]527608a6-f887-e225-199b-bcacb813476d" [ 734.072455] env[61273]: _type = "Task" [ 734.072455] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.080021] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]527608a6-f887-e225-199b-bcacb813476d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.198129] env[61273]: DEBUG nova.scheduler.client.report [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.200691] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg cdde4dacbe764d0cb70b3e0e5907c6b8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 734.215830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdde4dacbe764d0cb70b3e0e5907c6b8 [ 734.282030] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375303, 'name': Rename_Task, 'duration_secs': 0.157297} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.283044] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 734.283480] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-357fcab0-b37f-4e17-9757-42d59b2a3d38 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.290013] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 734.290013] env[61273]: value = "task-375304" [ 734.290013] env[61273]: _type = "Task" [ 734.290013] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.298134] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375304, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.466553] env[61273]: INFO nova.compute.manager [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] [instance: 23774aa5-1608-495f-8015-29e25f856c69] Took 1.03 seconds to deallocate network for instance. [ 734.468352] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 2828a96fb3094bc292cffca9e93c5868 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 734.501106] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2828a96fb3094bc292cffca9e93c5868 [ 734.582277] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]527608a6-f887-e225-199b-bcacb813476d, 'name': SearchDatastore_Task, 'duration_secs': 0.008931} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.582523] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.582777] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d/1efd9cfe-3a0c-412c-aa44-3bf650d08f9d.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 734.583019] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab02b6b5-32cf-46fc-9b49-6ed2360923d3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.590345] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 734.590345] env[61273]: value = "task-375305" [ 734.590345] env[61273]: _type = "Task" [ 734.590345] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.598119] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375305, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.703265] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.703691] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 734.705427] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg a3c0ed61f0f9485fa4b01cbe00adbeb0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 734.706552] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.591s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.708057] env[61273]: INFO nova.compute.claims [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.709826] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg f5a8f0762c6f4a68ae0e58faebde7d13 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 734.739401] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3c0ed61f0f9485fa4b01cbe00adbeb0 [ 734.754992] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5a8f0762c6f4a68ae0e58faebde7d13 [ 734.802500] env[61273]: DEBUG oslo_vmware.api [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375304, 'name': PowerOnVM_Task, 'duration_secs': 0.41309} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.802788] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 734.802990] env[61273]: INFO nova.compute.manager [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Took 4.81 seconds to spawn the instance on the hypervisor. 
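Editor's note: the CopyVirtualDisk_Task / Rename_Task / PowerOnVM_Task sequence above is driven by oslo.vmware's wait_for_task helper, which keeps polling the vCenter task object until it reaches a terminal state (the "progress is 0%" and "completed successfully" lines come from its _poll_task callback). Below is a minimal, self-contained Python sketch of that poll-until-done pattern; it is not the library's actual code, and get_task_info is a hypothetical stand-in for the real vSphere property-collector read.

import time

# Hypothetical stand-in for the vSphere call oslo.vmware performs on each
# poll; expected to return (state, progress, error) for the task reference.
def get_task_info(task_ref):
    raise NotImplementedError("replace with a real vSphere API call")

def wait_for_task(task_ref, poll_interval=0.5, timeout=300):
    """Poll a vCenter task until it succeeds, fails, or times out.

    Sketch of the loop behind the 'progress is N%' / 'completed successfully'
    log lines above (assumptions: states named 'success', 'error', else running).
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = get_task_info(task_ref)
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"task {task_ref} failed: {error}")
        # Still queued/running: report progress and poll again.
        print(f"Task {task_ref} progress is {progress}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_ref} did not complete in {timeout}s")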
[ 734.803167] env[61273]: DEBUG nova.compute.manager [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 734.803955] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4621427-6f8f-4966-9106-2f5cde596811 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.813230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 6395f74a4d4a4771a052324c936a0822 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 734.850337] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6395f74a4d4a4771a052324c936a0822 [ 734.974575] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg 0c081d9c069f45f4847cbca7e73e5fa1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 735.007372] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c081d9c069f45f4847cbca7e73e5fa1 [ 735.100815] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375305, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473454} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.101694] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d/1efd9cfe-3a0c-412c-aa44-3bf650d08f9d.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 735.101694] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 735.101694] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9fba3c5f-a06a-4272-9570-a297a4fc05a1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.108295] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 735.108295] env[61273]: value = "task-375306" [ 735.108295] env[61273]: _type = "Task" [ 735.108295] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.116734] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375306, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.214498] env[61273]: DEBUG nova.compute.utils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 735.215013] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg f8cff2b6e13441479619534850934529 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 735.217154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg edbec28d8b4f45729da4bbac0f5ea43b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 735.218091] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 735.218258] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 735.238703] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8cff2b6e13441479619534850934529 [ 735.240506] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edbec28d8b4f45729da4bbac0f5ea43b [ 735.326219] env[61273]: INFO nova.compute.manager [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Took 23.51 seconds to build instance. 
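Editor's note: the recurring 'Acquiring lock ... / Lock "..." acquired ... waited Ns / "released" ... held Ns' lines in this trace (the "compute_resources" lock, the per-image datastore cache lock) come from oslo.concurrency's lockutils helpers. The sketch below shows the two usual ways a caller serializes on such a named in-process lock; lockutils.synchronized and lockutils.lock are real oslo.concurrency APIs, while claim_resources, process_cached_image, and the lock names are illustrative only.

from oslo_concurrency import lockutils

# Serialize all resource-claim work on one named lock, analogous to the
# "compute_resources" lock held by ResourceTracker.instance_claim above.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Illustrative placeholder for the work done while the lock is held.
    print(f"claiming resources for {instance_uuid}")

# Context-manager form for ad-hoc critical sections, e.g. the per-image
# datastore cache lock seen earlier in the trace.
def process_cached_image(image_id):
    with lockutils.lock(f'[datastore1] devstack-image-cache_base/{image_id}'):
        print(f"image {image_id} is locked while it is copied")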
[ 735.326793] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 7758fb8794f240e38a665a327105d8dd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 735.337863] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7758fb8794f240e38a665a327105d8dd [ 735.387701] env[61273]: DEBUG nova.policy [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0986847129324981ba5df690cc76bcba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca33c1550e694c38a15f9be28d24b19c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 735.501368] env[61273]: INFO nova.scheduler.client.report [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Deleted allocations for instance 23774aa5-1608-495f-8015-29e25f856c69 [ 735.520063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Expecting reply to msg b40e6b2a378b4543ae5ee358a8fe1a2c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 735.539023] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b40e6b2a378b4543ae5ee358a8fe1a2c [ 735.618264] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375306, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059668} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.618622] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 735.619309] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75414a3-162e-4983-bdb8-b1671dc9a0cc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.639012] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d/1efd9cfe-3a0c-412c-aa44-3bf650d08f9d.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 735.639258] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9375e8a-7d47-4fb9-911d-ea827d0d2a11 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.658830] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 735.658830] env[61273]: value = "task-375307" [ 735.658830] env[61273]: _type = "Task" [ 735.658830] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.666737] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375307, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.718967] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 735.720750] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg fcfa4a3d59a54ea9b0e272ea62aa34e0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 735.781330] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcfa4a3d59a54ea9b0e272ea62aa34e0 [ 735.828930] env[61273]: DEBUG oslo_concurrency.lockutils [None req-99d429a5-56ce-4340-aadf-504939f66379 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "ca8a38c7-a81c-407a-9558-3d15e492d9fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.467s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.829545] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 1c0890438e344ce29228e2f2cb644b34 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 735.831356] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Successfully created port: 7eb3b801-5a05-42ee-8612-36ba45119473 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 735.841606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c0890438e344ce29228e2f2cb644b34 [ 736.029595] env[61273]: DEBUG oslo_concurrency.lockutils [None req-24c97928-8313-4706-8c5b-9b40234d869d tempest-ImagesOneServerTestJSON-1243653497 tempest-ImagesOneServerTestJSON-1243653497-project-member] Lock "23774aa5-1608-495f-8015-29e25f856c69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.773s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.030188] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 90e7e02a60904388839c21a23ce78d3e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 736.044187] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90e7e02a60904388839c21a23ce78d3e [ 736.069840] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ddb207-6a72-46ee-a579-195d154f5131 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.077447] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563fdd37-b99c-4f76-aebe-b352a3539d1b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.106420] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfee6ee1-e92e-49ec-841c-fed2684ce5e7 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.113001] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438b7d6e-c360-46c6-91b6-58490da90cc0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.125624] env[61273]: DEBUG nova.compute.provider_tree [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.126095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 994e6a8e39574d99b1acc4c21a50f900 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 736.132930] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 994e6a8e39574d99b1acc4c21a50f900 [ 736.167231] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375307, 'name': ReconfigVM_Task, 'duration_secs': 0.293264} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.167500] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d/1efd9cfe-3a0c-412c-aa44-3bf650d08f9d.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.168128] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4180a87f-c045-415e-bf1b-0f006cb2a13e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.174280] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 736.174280] env[61273]: value = "task-375308" [ 736.174280] env[61273]: _type = "Task" [ 736.174280] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.181592] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375308, 'name': Rename_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.224929] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 2a4f24885dec4f78975974211c5f3f38 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 736.261581] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a4f24885dec4f78975974211c5f3f38 [ 736.334659] env[61273]: DEBUG nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 736.336363] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg ee43c4c0be5d4c018d3ebb654e4329d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 736.369714] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee43c4c0be5d4c018d3ebb654e4329d2 [ 736.532632] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 736.534445] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 33426949418c4e46a5c18b6c856290db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 736.575756] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33426949418c4e46a5c18b6c856290db [ 736.628872] env[61273]: DEBUG nova.scheduler.client.report [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 736.631586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg bf7c27a7bfc04268b535118649ab0543 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 736.647448] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf7c27a7bfc04268b535118649ab0543 [ 736.685579] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 
tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375308, 'name': Rename_Task, 'duration_secs': 0.158413} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.685579] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 736.685579] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-620fa146-ee2a-432a-a1fa-754bf8b97631 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.691415] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 736.691415] env[61273]: value = "task-375309" [ 736.691415] env[61273]: _type = "Task" [ 736.691415] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.698888] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375309, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.700951] env[61273]: DEBUG nova.compute.manager [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Received event network-changed-7eb3b801-5a05-42ee-8612-36ba45119473 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 736.701148] env[61273]: DEBUG nova.compute.manager [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Refreshing instance network info cache due to event network-changed-7eb3b801-5a05-42ee-8612-36ba45119473. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 736.701381] env[61273]: DEBUG oslo_concurrency.lockutils [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] Acquiring lock "refresh_cache-799d4a06-f7a3-4b92-8e96-ac076848fdd3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.701561] env[61273]: DEBUG oslo_concurrency.lockutils [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] Acquired lock "refresh_cache-799d4a06-f7a3-4b92-8e96-ac076848fdd3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.701789] env[61273]: DEBUG nova.network.neutron [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Refreshing network info cache for port 7eb3b801-5a05-42ee-8612-36ba45119473 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 736.702235] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] Expecting reply to msg 3775ed7156164c06ad25f31b9d6e7039 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 736.710124] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3775ed7156164c06ad25f31b9d6e7039 [ 736.728492] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 736.758240] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 736.758533] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 736.758794] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 736.759057] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 736.759290] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 736.759532] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 736.759846] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 736.760130] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 736.760410] env[61273]: DEBUG nova.virt.hardware [None 
req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 736.760677] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 736.760951] env[61273]: DEBUG nova.virt.hardware [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 736.762265] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e0bb6b-744c-47d7-ad81-26102cdb0a02 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.772553] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925af03b-ffdb-413e-8bbc-9438ca608f41 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.857556] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.858632] env[61273]: ERROR nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7eb3b801-5a05-42ee-8612-36ba45119473, please check neutron logs for more information. 
[ 736.858632] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 736.858632] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.858632] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 736.858632] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 736.858632] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 736.858632] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 736.858632] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 736.858632] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.858632] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 736.858632] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.858632] env[61273]: ERROR nova.compute.manager raise self.value [ 736.858632] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 736.858632] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 736.858632] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.858632] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 736.859196] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.859196] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 736.859196] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7eb3b801-5a05-42ee-8612-36ba45119473, please check neutron logs for more information. 
[ 736.859196] env[61273]: ERROR nova.compute.manager [ 736.859196] env[61273]: Traceback (most recent call last): [ 736.859196] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 736.859196] env[61273]: listener.cb(fileno) [ 736.859196] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.859196] env[61273]: result = function(*args, **kwargs) [ 736.859196] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 736.859196] env[61273]: return func(*args, **kwargs) [ 736.859196] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 736.859196] env[61273]: raise e [ 736.859196] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.859196] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 736.859196] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 736.859196] env[61273]: created_port_ids = self._update_ports_for_instance( [ 736.859196] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 736.859196] env[61273]: with excutils.save_and_reraise_exception(): [ 736.859196] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.859196] env[61273]: self.force_reraise() [ 736.859196] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.859196] env[61273]: raise self.value [ 736.859196] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 736.859196] env[61273]: updated_port = self._update_port( [ 736.859196] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.859196] env[61273]: _ensure_no_port_binding_failure(port) [ 736.859196] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.859196] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 736.860145] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 7eb3b801-5a05-42ee-8612-36ba45119473, please check neutron logs for more information. [ 736.860145] env[61273]: Removing descriptor: 19 [ 736.860145] env[61273]: ERROR nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7eb3b801-5a05-42ee-8612-36ba45119473, please check neutron logs for more information. 
[ 736.860145] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Traceback (most recent call last): [ 736.860145] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 736.860145] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] yield resources [ 736.860145] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 736.860145] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self.driver.spawn(context, instance, image_meta, [ 736.860145] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 736.860145] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.860145] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 736.860145] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] vm_ref = self.build_virtual_machine(instance, [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] for vif in network_info: [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] return self._sync_wrapper(fn, *args, **kwargs) [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self.wait() [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self[:] = self._gt.wait() [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] return self._exit_event.wait() [ 736.860567] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 736.860979] env[61273]: ERROR 
nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] result = hub.switch() [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] return self.greenlet.switch() [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] result = function(*args, **kwargs) [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] return func(*args, **kwargs) [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] raise e [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] nwinfo = self.network_api.allocate_for_instance( [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 736.860979] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] created_port_ids = self._update_ports_for_instance( [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] with excutils.save_and_reraise_exception(): [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self.force_reraise() [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] raise self.value [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] updated_port = self._update_port( [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.861394] 
env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] _ensure_no_port_binding_failure(port) [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.861394] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] raise exception.PortBindingFailed(port_id=port['id']) [ 736.861858] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] nova.exception.PortBindingFailed: Binding failed for port 7eb3b801-5a05-42ee-8612-36ba45119473, please check neutron logs for more information. [ 736.861858] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] [ 736.861858] env[61273]: INFO nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Terminating instance [ 736.863254] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "refresh_cache-799d4a06-f7a3-4b92-8e96-ac076848fdd3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.053957] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.136197] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.136742] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 737.138603] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 66d4276fb1cc4fcab2a684524b088560 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 737.139850] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.016s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.141250] env[61273]: INFO nova.compute.claims [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.143011] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg a863b0887f35469296bff14cebf221fe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 737.178506] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66d4276fb1cc4fcab2a684524b088560 [ 737.183166] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a863b0887f35469296bff14cebf221fe [ 737.200680] env[61273]: DEBUG oslo_vmware.api [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375309, 'name': PowerOnVM_Task, 'duration_secs': 0.42183} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.200952] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 737.201215] env[61273]: INFO nova.compute.manager [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Took 4.77 seconds to spawn the instance on the hypervisor. 
[ 737.201307] env[61273]: DEBUG nova.compute.manager [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 737.202120] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f33ef75-f283-40d0-9842-fae311c06e04 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.211452] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg baae17a476b949568c86b13bd967067f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 737.220618] env[61273]: DEBUG nova.network.neutron [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.257228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baae17a476b949568c86b13bd967067f [ 737.317649] env[61273]: DEBUG nova.network.neutron [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.318198] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] Expecting reply to msg df052b05875a4876a501fa8b42c3c5b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 737.329087] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df052b05875a4876a501fa8b42c3c5b2 [ 737.646923] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg bc909a6db4c64da4b8e5d61358328377 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 737.648553] env[61273]: DEBUG nova.compute.utils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 737.649116] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 5d810a8d488d46b894cc50975df0675d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 737.649924] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 737.650088] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 737.662007] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc909a6db4c64da4b8e5d61358328377 [ 737.689004] env[61273]: DEBUG nova.policy [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eee3c5a564a24daf92c4affc3607cbc7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '80cf26a75a1a420a85bdf5f723c98229', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 737.709769] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d810a8d488d46b894cc50975df0675d [ 737.724417] env[61273]: INFO nova.compute.manager [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Took 23.96 seconds to build instance. [ 737.725264] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 3113476f97f741ffa2b20a05b4062120 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 737.741517] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3113476f97f741ffa2b20a05b4062120 [ 737.821392] env[61273]: DEBUG oslo_concurrency.lockutils [req-7cb541d4-281b-4122-89c4-a5e499d7764a req-3ad4933a-fbe4-4b62-bba7-ab7171d77975 service nova] Releasing lock "refresh_cache-799d4a06-f7a3-4b92-8e96-ac076848fdd3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.821684] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquired lock "refresh_cache-799d4a06-f7a3-4b92-8e96-ac076848fdd3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.822403] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 737.822403] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 2750d8324d434df2ba3aed1a58d4d34c in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 737.830198] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2750d8324d434df2ba3aed1a58d4d34c [ 738.148009] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Successfully created port: d0b95e54-3f33-48d6-9f74-c633d85b9772 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 738.157154] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 738.159475] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 90fba15fb66e449391305baf294863eb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 738.201645] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90fba15fb66e449391305baf294863eb [ 738.229190] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f69888-e995-47ed-8ffb-76173eb68b00 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "1efd9cfe-3a0c-412c-aa44-3bf650d08f9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.178s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.229749] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 454a727f3648423eaab9629f9ec19f0e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 738.243295] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 454a727f3648423eaab9629f9ec19f0e [ 738.342209] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.384279] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg d2fb0b4a32f94927a672c33a8da43aba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 738.395119] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2fb0b4a32f94927a672c33a8da43aba [ 738.454622] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.455178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 1f33336e794242a2aaddb7070bf284db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 738.463978] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f33336e794242a2aaddb7070bf284db [ 738.499213] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cc00f9-fec8-4297-9da5-b60a9d70e092 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.506996] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3657259-a89f-42f0-afac-671cb8e7738a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.549886] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9dde46-35e6-4893-bbc8-b9d630ef8881 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.557361] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588b2643-2c83-4ec3-abab-84cec974d035 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.574137] env[61273]: DEBUG nova.compute.provider_tree [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.574652] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 5c2d47025a954c21a3fc44e9b62d68c0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 738.583862] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c2d47025a954c21a3fc44e9b62d68c0 [ 738.664943] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 4d1423aacc3a4a78869e2879da73ef47 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 738.700315] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d1423aacc3a4a78869e2879da73ef47 [ 738.732434] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 738.734348] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 27ade793b20c4ef781acc86475780ac9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 738.807807] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27ade793b20c4ef781acc86475780ac9 [ 738.810010] env[61273]: DEBUG nova.compute.manager [req-8b3c322e-25ea-4ea2-b99b-da779ce2e427 req-a57191db-f8e6-4a06-8255-02738583cf05 service nova] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Received event network-vif-deleted-7eb3b801-5a05-42ee-8612-36ba45119473 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 738.886629] env[61273]: INFO nova.compute.manager [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Rebuilding instance [ 738.938433] env[61273]: DEBUG nova.compute.manager [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 738.939384] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8e9c3f-7543-45d0-9ecb-d040ee846bd0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.948137] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 5bd53a79c6fb414bacd33e86d60d129a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 738.957874] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Releasing lock "refresh_cache-799d4a06-f7a3-4b92-8e96-ac076848fdd3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.958394] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 738.958602] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 738.958921] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b24f8f3-2e2f-4899-8f0c-2d5aa08a3db1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.969075] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5535196-f318-4afe-8759-c0978c5c0cbf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.982455] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bd53a79c6fb414bacd33e86d60d129a [ 738.993520] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 799d4a06-f7a3-4b92-8e96-ac076848fdd3 could not be found. [ 738.993752] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 738.993937] env[61273]: INFO nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 738.994212] env[61273]: DEBUG oslo.service.loopingcall [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.994443] env[61273]: DEBUG nova.compute.manager [-] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 738.994533] env[61273]: DEBUG nova.network.neutron [-] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 739.009661] env[61273]: DEBUG nova.network.neutron [-] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.010347] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c1b7522cbf764d67af44766d617a1817 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 739.023954] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1b7522cbf764d67af44766d617a1817 [ 739.077837] env[61273]: DEBUG nova.scheduler.client.report [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 739.079997] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 1edec2a1ac5a4fe58643d5ca7e46c7d8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 739.096414] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1edec2a1ac5a4fe58643d5ca7e46c7d8 [ 739.168304] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 739.191256] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 739.191510] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 739.191777] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 739.191944] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 739.192064] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 739.192219] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 739.192420] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 739.192576] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 739.192745] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 739.192908] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 739.193079] env[61273]: DEBUG nova.virt.hardware [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 739.193957] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d91864-6a2a-4117-9788-4f964bd8ea99 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.202039] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940486b6-ea41-4829-8219-0c75d326377c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.251432] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.452236] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 739.452543] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3a1f358-3b71-4b96-9ff2-e7361cddbe6d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.462369] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 739.462369] env[61273]: value = "task-375310" [ 739.462369] env[61273]: _type = "Task" [ 739.462369] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.466481] env[61273]: ERROR nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d0b95e54-3f33-48d6-9f74-c633d85b9772, please check neutron logs for more information. 
[ 739.466481] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 739.466481] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.466481] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 739.466481] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 739.466481] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 739.466481] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 739.466481] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 739.466481] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.466481] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 739.466481] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.466481] env[61273]: ERROR nova.compute.manager raise self.value [ 739.466481] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 739.466481] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 739.466481] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.466481] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 739.467038] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 739.467038] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 739.467038] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d0b95e54-3f33-48d6-9f74-c633d85b9772, please check neutron logs for more information. 
[ 739.467038] env[61273]: ERROR nova.compute.manager [ 739.467038] env[61273]: Traceback (most recent call last): [ 739.467038] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 739.467038] env[61273]: listener.cb(fileno) [ 739.467038] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 739.467038] env[61273]: result = function(*args, **kwargs) [ 739.467038] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 739.467038] env[61273]: return func(*args, **kwargs) [ 739.467038] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 739.467038] env[61273]: raise e [ 739.467038] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.467038] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 739.467038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 739.467038] env[61273]: created_port_ids = self._update_ports_for_instance( [ 739.467038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 739.467038] env[61273]: with excutils.save_and_reraise_exception(): [ 739.467038] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.467038] env[61273]: self.force_reraise() [ 739.467038] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.467038] env[61273]: raise self.value [ 739.467038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 739.467038] env[61273]: updated_port = self._update_port( [ 739.467038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.467038] env[61273]: _ensure_no_port_binding_failure(port) [ 739.467038] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 739.467038] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 739.467892] env[61273]: nova.exception.PortBindingFailed: Binding failed for port d0b95e54-3f33-48d6-9f74-c633d85b9772, please check neutron logs for more information. [ 739.467892] env[61273]: Removing descriptor: 19 [ 739.467892] env[61273]: ERROR nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d0b95e54-3f33-48d6-9f74-c633d85b9772, please check neutron logs for more information. 
[ 739.467892] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Traceback (most recent call last): [ 739.467892] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 739.467892] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] yield resources [ 739.467892] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 739.467892] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self.driver.spawn(context, instance, image_meta, [ 739.467892] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 739.467892] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 739.467892] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 739.467892] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] vm_ref = self.build_virtual_machine(instance, [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] vif_infos = vmwarevif.get_vif_info(self._session, [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] for vif in network_info: [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] return self._sync_wrapper(fn, *args, **kwargs) [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self.wait() [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self[:] = self._gt.wait() [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] return self._exit_event.wait() [ 739.468303] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 739.468744] env[61273]: ERROR 
nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] result = hub.switch() [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] return self.greenlet.switch() [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] result = function(*args, **kwargs) [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] return func(*args, **kwargs) [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] raise e [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] nwinfo = self.network_api.allocate_for_instance( [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 739.468744] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] created_port_ids = self._update_ports_for_instance( [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] with excutils.save_and_reraise_exception(): [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self.force_reraise() [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] raise self.value [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] updated_port = self._update_port( [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 739.469177] 
env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] _ensure_no_port_binding_failure(port) [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 739.469177] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] raise exception.PortBindingFailed(port_id=port['id']) [ 739.469630] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] nova.exception.PortBindingFailed: Binding failed for port d0b95e54-3f33-48d6-9f74-c633d85b9772, please check neutron logs for more information. [ 739.469630] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] [ 739.469630] env[61273]: INFO nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Terminating instance [ 739.472341] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375310, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.472843] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Acquiring lock "refresh_cache-21213cff-55b3-48fd-91b4-6718f7819bc3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.472997] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Acquired lock "refresh_cache-21213cff-55b3-48fd-91b4-6718f7819bc3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.473159] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 739.473575] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg f393e4fc02df4f57bd514d67d19223ef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 739.480547] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f393e4fc02df4f57bd514d67d19223ef [ 739.511893] env[61273]: DEBUG nova.network.neutron [-] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.512414] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3820e8e6aa2a4d65954d98f621c7d3a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 739.521661] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3820e8e6aa2a4d65954d98f621c7d3a4 [ 739.582541] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.583093] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 739.584867] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg cef3a3dea8274eb7adbafe7acd16249c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 739.585928] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.564s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.587294] env[61273]: INFO nova.compute.claims [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 739.588782] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 6589562e7ba7400dbea30b9af6e8d90e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 739.617143] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cef3a3dea8274eb7adbafe7acd16249c [ 739.622094] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6589562e7ba7400dbea30b9af6e8d90e [ 739.973257] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375310, 'name': PowerOffVM_Task, 'duration_secs': 0.185613} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.973478] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 739.973694] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 739.974549] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8ea98a-8a7e-4337-8dd9-acfb25e826c5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.983555] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 739.983814] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd43d0be-95bb-467f-8c56-381aba7bf9e1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.991237] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.009835] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 740.010053] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Deleting contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 740.010203] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Deleting the datastore file [datastore1] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 740.010453] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a01c54d-9ee1-43b0-af45-d8045925add8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.015895] env[61273]: INFO nova.compute.manager [-] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Took 1.02 seconds to deallocate network for instance. [ 740.018614] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 740.018614] env[61273]: value = "task-375312" [ 740.018614] env[61273]: _type = "Task" [ 740.018614] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.019121] env[61273]: DEBUG nova.compute.claims [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 740.019288] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.027085] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375312, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.067008] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.068361] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 2bdcbe340e7a4d38ad3e41cc11bce8d4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 740.078682] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bdcbe340e7a4d38ad3e41cc11bce8d4 [ 740.091223] env[61273]: DEBUG nova.compute.utils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 740.091864] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 85591f0a6c7b428fb6a799c57064e546 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 740.097174] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 8e8ea604a62a40028c6e0be8421149c5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 740.097174] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 740.097174] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 740.101837] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e8ea604a62a40028c6e0be8421149c5 [ 740.102253] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85591f0a6c7b428fb6a799c57064e546 [ 740.140893] env[61273]: DEBUG nova.policy [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c471664da5894985bf7478057ea19b73', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3de421e0f994df8b809ce0096753f23', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 740.450104] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Successfully created port: 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 740.529677] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088385} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.529952] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 740.530261] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Deleted contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 740.530351] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 740.531861] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg f4536e61a7f9421ca56cd9ee84e0f7bf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 740.565573] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4536e61a7f9421ca56cd9ee84e0f7bf [ 740.569574] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Releasing lock "refresh_cache-21213cff-55b3-48fd-91b4-6718f7819bc3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.570214] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 740.570536] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 740.571267] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e5a796a-548e-4feb-92ed-16b56a56b93b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.580327] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f150cd-88e9-4476-9462-ec00004adf82 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.601933] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 740.604150] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 9338c511f3af49c9a3bf5c33095c24cb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 740.611007] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 21213cff-55b3-48fd-91b4-6718f7819bc3 could not be found. [ 740.611354] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 740.611665] env[61273]: INFO nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 740.612059] env[61273]: DEBUG oslo.service.loopingcall [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.612658] env[61273]: DEBUG nova.compute.manager [-] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 740.612861] env[61273]: DEBUG nova.network.neutron [-] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 740.646996] env[61273]: DEBUG nova.network.neutron [-] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.648057] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 682c08c1e2f54d898e2bafefdc448832 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 740.649360] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9338c511f3af49c9a3bf5c33095c24cb [ 740.657143] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 682c08c1e2f54d898e2bafefdc448832 [ 740.842461] env[61273]: DEBUG nova.compute.manager [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Received event network-changed-d0b95e54-3f33-48d6-9f74-c633d85b9772 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 740.842656] env[61273]: DEBUG nova.compute.manager [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Refreshing instance network info cache due to event network-changed-d0b95e54-3f33-48d6-9f74-c633d85b9772. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 740.842882] env[61273]: DEBUG oslo_concurrency.lockutils [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] Acquiring lock "refresh_cache-21213cff-55b3-48fd-91b4-6718f7819bc3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.842994] env[61273]: DEBUG oslo_concurrency.lockutils [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] Acquired lock "refresh_cache-21213cff-55b3-48fd-91b4-6718f7819bc3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.843282] env[61273]: DEBUG nova.network.neutron [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Refreshing network info cache for port d0b95e54-3f33-48d6-9f74-c633d85b9772 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 740.843608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] Expecting reply to msg a030afe867a34b6a93a803203ec56238 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 740.854883] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a030afe867a34b6a93a803203ec56238 [ 740.946345] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d876241a-5b46-4bf3-880e-3916975cb48f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.954233] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb52d7bc-bb47-4bcc-a8bc-57cb4793e386 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.987001] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2d8c53-2391-418a-9e6b-03309761601f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.995176] env[61273]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0f757d-6807-4bd7-8b04-1701c5828ebe {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.010376] env[61273]: DEBUG nova.compute.provider_tree [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.011086] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 7604996e40d8472b97a715e6debb2259 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 741.018467] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7604996e40d8472b97a715e6debb2259 [ 741.036461] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 80372759621e40099ca7f722b4cebb15 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 741.076023] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80372759621e40099ca7f722b4cebb15 [ 741.118315] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg bef5c2aab33f48428ec59f171b0ce8bd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 741.151184] env[61273]: DEBUG nova.network.neutron [-] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.151410] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 21cb26bf40ed41b989beef0497e4ea74 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 741.162786] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21cb26bf40ed41b989beef0497e4ea74 [ 741.171530] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bef5c2aab33f48428ec59f171b0ce8bd [ 741.365769] env[61273]: DEBUG nova.network.neutron [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 741.447957] env[61273]: DEBUG nova.network.neutron [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.448526] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] Expecting reply to msg 24e8cb8c2f9447adb4a0f317212079b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 741.460177] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24e8cb8c2f9447adb4a0f317212079b1 [ 741.513760] env[61273]: DEBUG nova.scheduler.client.report [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 741.516325] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg b7995069d1e149ce9250271514229e54 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 741.531578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7995069d1e149ce9250271514229e54 [ 741.566517] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.566775] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.566927] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 
tempest-ServerShowV247Test-937081600-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.567100] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.567243] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.567384] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 741.567583] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.567737] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 741.568041] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.568230] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.568396] env[61273]: DEBUG nova.virt.hardware [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.569569] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05859fe-421a-41b0-9794-34433886f048 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.577666] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa39db4-f02b-480f-b19e-068ac9080fb9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.594147] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 
tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 741.599906] env[61273]: DEBUG oslo.service.loopingcall [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 741.600200] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 741.601162] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-391268ec-6e5f-42b7-b775-a75c05caede0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.617211] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 741.617211] env[61273]: value = "task-375313" [ 741.617211] env[61273]: _type = "Task" [ 741.617211] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.623161] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 741.631055] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375313, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.650895] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.651201] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.651300] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.651487] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.651674] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.651840] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 741.652061] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.652219] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 741.652380] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 
tempest-ImagesTestJSON-1894548440-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.652532] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.652767] env[61273]: DEBUG nova.virt.hardware [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.653266] env[61273]: INFO nova.compute.manager [-] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Took 1.04 seconds to deallocate network for instance. [ 741.654206] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ade504-f4dd-4dba-8814-1e202dfebac4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.659444] env[61273]: DEBUG nova.compute.claims [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 741.661896] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.663588] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a49a2d9-4f3d-4127-bd01-72c598994251 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.952031] env[61273]: DEBUG oslo_concurrency.lockutils [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] Releasing lock "refresh_cache-21213cff-55b3-48fd-91b4-6718f7819bc3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.952317] env[61273]: DEBUG nova.compute.manager [req-6f87e54f-a232-485a-b7f9-8083393a9874 req-42e421e7-6769-40fb-bddd-2cef688e24c0 service nova] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Received event network-vif-deleted-d0b95e54-3f33-48d6-9f74-c633d85b9772 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 742.002255] env[61273]: ERROR nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df, please check neutron logs for more information. 
[ 742.002255] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 742.002255] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.002255] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 742.002255] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 742.002255] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 742.002255] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 742.002255] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 742.002255] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.002255] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 742.002255] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.002255] env[61273]: ERROR nova.compute.manager raise self.value [ 742.002255] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 742.002255] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 742.002255] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.002255] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 742.003178] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.003178] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 742.003178] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df, please check neutron logs for more information. 
[ 742.003178] env[61273]: ERROR nova.compute.manager [ 742.003178] env[61273]: Traceback (most recent call last): [ 742.003178] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 742.003178] env[61273]: listener.cb(fileno) [ 742.003178] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 742.003178] env[61273]: result = function(*args, **kwargs) [ 742.003178] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 742.003178] env[61273]: return func(*args, **kwargs) [ 742.003178] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 742.003178] env[61273]: raise e [ 742.003178] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.003178] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 742.003178] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 742.003178] env[61273]: created_port_ids = self._update_ports_for_instance( [ 742.003178] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 742.003178] env[61273]: with excutils.save_and_reraise_exception(): [ 742.003178] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.003178] env[61273]: self.force_reraise() [ 742.003178] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.003178] env[61273]: raise self.value [ 742.003178] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 742.003178] env[61273]: updated_port = self._update_port( [ 742.003178] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.003178] env[61273]: _ensure_no_port_binding_failure(port) [ 742.003178] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.003178] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 742.004721] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df, please check neutron logs for more information. [ 742.004721] env[61273]: Removing descriptor: 19 [ 742.004721] env[61273]: ERROR nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df, please check neutron logs for more information. 
[ 742.004721] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Traceback (most recent call last): [ 742.004721] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 742.004721] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] yield resources [ 742.004721] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 742.004721] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self.driver.spawn(context, instance, image_meta, [ 742.004721] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 742.004721] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self._vmops.spawn(context, instance, image_meta, injected_files, [ 742.004721] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 742.004721] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] vm_ref = self.build_virtual_machine(instance, [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] vif_infos = vmwarevif.get_vif_info(self._session, [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] for vif in network_info: [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] return self._sync_wrapper(fn, *args, **kwargs) [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self.wait() [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self[:] = self._gt.wait() [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] return self._exit_event.wait() [ 742.005560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 742.007378] env[61273]: ERROR 
nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] result = hub.switch() [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] return self.greenlet.switch() [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] result = function(*args, **kwargs) [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] return func(*args, **kwargs) [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] raise e [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] nwinfo = self.network_api.allocate_for_instance( [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 742.007378] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] created_port_ids = self._update_ports_for_instance( [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] with excutils.save_and_reraise_exception(): [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self.force_reraise() [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] raise self.value [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] updated_port = self._update_port( [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.008560] 
env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] _ensure_no_port_binding_failure(port) [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.008560] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] raise exception.PortBindingFailed(port_id=port['id']) [ 742.010065] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] nova.exception.PortBindingFailed: Binding failed for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df, please check neutron logs for more information. [ 742.010065] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] [ 742.010065] env[61273]: INFO nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Terminating instance [ 742.010065] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "refresh_cache-f3df4816-ef02-4ecc-a8ca-4f0eaf286218" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.010065] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquired lock "refresh_cache-f3df4816-ef02-4ecc-a8ca-4f0eaf286218" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.010065] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 742.010065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg dd264cf57a064c76a82557304bc8fcc4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 742.016771] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd264cf57a064c76a82557304bc8fcc4 [ 742.018956] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.019485] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 742.021618] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg dab2f134cf664f7abecb130384c0094c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 742.022367] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.024049] env[61273]: INFO nova.compute.claims [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.025361] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 5b89487fea1b46bd8373329ae5b31003 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 742.065915] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dab2f134cf664f7abecb130384c0094c [ 742.074503] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b89487fea1b46bd8373329ae5b31003 [ 742.128320] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375313, 'name': CreateVM_Task} progress is 99%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.528742] env[61273]: DEBUG nova.compute.utils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 742.529369] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 5cde852e52444b93bc620ce96090f0f2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 742.531419] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg a2f53c282360440aba8e37b6f68451f7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 742.535989] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 742.535989] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 742.535989] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 742.544583] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2f53c282360440aba8e37b6f68451f7 [ 742.545749] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cde852e52444b93bc620ce96090f0f2 [ 742.576964] env[61273]: DEBUG nova.policy [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6529f0831bb4b528385935bd06e9e8f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2a065da05414c20bd187cbd0cd02b51', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 742.626743] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375313, 'name': CreateVM_Task} progress is 99%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.637351] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.637997] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg ba880e3298ed40ab9f2ee0f16f284186 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 742.650522] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba880e3298ed40ab9f2ee0f16f284186 [ 742.853089] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Successfully created port: 8af34a72-d61f-4969-b3b5-acef24d3c087 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.952836] env[61273]: DEBUG nova.compute.manager [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Received event network-changed-0a0b4bd2-3d3f-440c-98f7-c40827ffa1df {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 742.953061] env[61273]: DEBUG nova.compute.manager [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Refreshing instance network info cache due to event network-changed-0a0b4bd2-3d3f-440c-98f7-c40827ffa1df. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 742.953252] env[61273]: DEBUG oslo_concurrency.lockutils [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] Acquiring lock "refresh_cache-f3df4816-ef02-4ecc-a8ca-4f0eaf286218" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.035613] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 743.037518] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 954fe141c3b84140a08afc0aa26a1689 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 743.095751] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 954fe141c3b84140a08afc0aa26a1689 [ 743.130213] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375313, 'name': CreateVM_Task, 'duration_secs': 1.439015} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.130381] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 743.130794] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.130945] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.131260] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 743.131829] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b9be168-e0e2-49c1-aaab-a37249123e51 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.142987] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Releasing lock "refresh_cache-f3df4816-ef02-4ecc-a8ca-4f0eaf286218" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
743.144334] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 743.145410] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 743.145965] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 743.145965] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]522a1d3e-f25b-6108-b303-8afe3d268982" [ 743.145965] env[61273]: _type = "Task" [ 743.145965] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.147206] env[61273]: DEBUG oslo_concurrency.lockutils [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] Acquired lock "refresh_cache-f3df4816-ef02-4ecc-a8ca-4f0eaf286218" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.147423] env[61273]: DEBUG nova.network.neutron [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Refreshing network info cache for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 743.148716] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] Expecting reply to msg 03326a4c0b2b41e984bcd79ae4e4d2c7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 743.152184] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65b0ded0-3691-4afd-9615-2107d0fa4208 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.160366] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03326a4c0b2b41e984bcd79ae4e4d2c7 [ 743.175179] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]522a1d3e-f25b-6108-b303-8afe3d268982, 'name': SearchDatastore_Task, 'duration_secs': 0.013991} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.175854] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.176152] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.176424] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.176601] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.176808] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.177172] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11e9398a-c585-4217-abde-ed884fc68102 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.181852] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ff8d9b-8feb-41e5-90ac-8a7789116f9f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.206043] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.206247] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 743.213836] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aee259c-aad4-4052-ba8c-21710317f5c9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.216365] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f3df4816-ef02-4ecc-a8ca-4f0eaf286218 could not be found. [ 743.216564] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 743.216740] env[61273]: INFO nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Took 0.07 seconds to destroy the instance on the hypervisor. [ 743.216972] env[61273]: DEBUG oslo.service.loopingcall [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.217682] env[61273]: DEBUG nova.compute.manager [-] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 743.217787] env[61273]: DEBUG nova.network.neutron [-] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 743.222937] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 743.222937] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52f1dc2b-664b-0d3c-6d1c-b2a81c0eff2a" [ 743.222937] env[61273]: _type = "Task" [ 743.222937] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.231397] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52f1dc2b-664b-0d3c-6d1c-b2a81c0eff2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.238967] env[61273]: DEBUG nova.network.neutron [-] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 743.239602] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6956a4471f684bcaace4dfdc4f634f23 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 743.247845] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6956a4471f684bcaace4dfdc4f634f23 [ 743.421288] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e070098-0423-4dc3-b1ea-0e07e79a30ee {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.429810] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c55e6da-081b-4fb0-b1fd-533b78ef2c1d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.461526] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c66473e-3d1b-4a31-8eb2-c9f861f02584 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.469433] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4042d05-94c3-401a-a05f-c8490d3f0fb4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.484619] env[61273]: DEBUG nova.compute.provider_tree [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.485466] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg fbd518d301c1437aaea0bfd705b49d1f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 743.498450] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbd518d301c1437aaea0bfd705b49d1f [ 743.546712] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 9d9b78008ab54deb9732dd4b62aedd2a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 743.564607] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.564842] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.580164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d9b78008ab54deb9732dd4b62aedd2a [ 743.596567] env[61273]: DEBUG nova.compute.manager [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Received event network-changed-8af34a72-d61f-4969-b3b5-acef24d3c087 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 743.596753] env[61273]: DEBUG nova.compute.manager [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Refreshing instance network info cache due to event network-changed-8af34a72-d61f-4969-b3b5-acef24d3c087. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 743.596959] env[61273]: DEBUG oslo_concurrency.lockutils [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] Acquiring lock "refresh_cache-2a7d4872-4ed7-4058-bc36-b199d89a9f14" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.597099] env[61273]: DEBUG oslo_concurrency.lockutils [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] Acquired lock "refresh_cache-2a7d4872-4ed7-4058-bc36-b199d89a9f14" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.597252] env[61273]: DEBUG nova.network.neutron [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Refreshing network info cache for port 8af34a72-d61f-4969-b3b5-acef24d3c087 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 743.597668] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] Expecting reply to msg 513e8a53f6664c7c813625b86e180d95 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 743.604077] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 513e8a53f6664c7c813625b86e180d95 [ 743.671438] env[61273]: DEBUG nova.network.neutron [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 743.733454] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52f1dc2b-664b-0d3c-6d1c-b2a81c0eff2a, 'name': SearchDatastore_Task, 'duration_secs': 0.00812} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.734363] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b1c7ebd-95e3-4566-b681-16ebb84e6e72 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.739610] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 743.739610] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]5296f877-ed73-bac2-1484-4192ec19fe2a" [ 743.739610] env[61273]: _type = "Task" [ 743.739610] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.743421] env[61273]: DEBUG nova.network.neutron [-] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.744063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5ad8ccf0aead4b9bb65ac6459a136dc6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 743.751400] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5296f877-ed73-bac2-1484-4192ec19fe2a, 'name': SearchDatastore_Task, 'duration_secs': 0.008283} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.752087] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.752638] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d/1efd9cfe-3a0c-412c-aa44-3bf650d08f9d.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 743.753440] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ad8ccf0aead4b9bb65ac6459a136dc6 [ 743.753972] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-065fd1e1-0be0-49ba-870f-e5934dc94518 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.760660] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 743.760660] env[61273]: value = "task-375314" [ 743.760660] env[61273]: _type = "Task" [ 743.760660] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.782138] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375314, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.788805] env[61273]: ERROR nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8af34a72-d61f-4969-b3b5-acef24d3c087, please check neutron logs for more information. [ 743.788805] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 743.788805] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.788805] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 743.788805] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 743.788805] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 743.788805] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 743.788805] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 743.788805] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.788805] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 743.788805] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.788805] env[61273]: ERROR nova.compute.manager raise self.value [ 743.788805] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 743.788805] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 743.788805] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.788805] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 743.789443] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.789443] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 743.789443] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8af34a72-d61f-4969-b3b5-acef24d3c087, please check neutron logs for more information. 
[ 743.789443] env[61273]: ERROR nova.compute.manager
[ 743.789999] env[61273]: Traceback (most recent call last):
[ 743.790103] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 743.790103] env[61273]: listener.cb(fileno)
[ 743.790197] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 743.790197] env[61273]: result = function(*args, **kwargs)
[ 743.790289] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 743.790289] env[61273]: return func(*args, **kwargs)
[ 743.790373] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 743.790373] env[61273]: raise e
[ 743.790456] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 743.790456] env[61273]: nwinfo = self.network_api.allocate_for_instance(
[ 743.790616] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance
[ 743.790616] env[61273]: created_port_ids = self._update_ports_for_instance(
[ 743.790702] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance
[ 743.790702] env[61273]: with excutils.save_and_reraise_exception():
[ 743.790793] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 743.790793] env[61273]: self.force_reraise()
[ 743.790879] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 743.790879] env[61273]: raise self.value
[ 743.790961] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance
[ 743.790961] env[61273]: updated_port = self._update_port(
[ 743.791053] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 743.791053] env[61273]: _ensure_no_port_binding_failure(port)
[ 743.791159] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 743.791159] env[61273]: raise exception.PortBindingFailed(port_id=port['id'])
[ 743.791262] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 8af34a72-d61f-4969-b3b5-acef24d3c087, please check neutron logs for more information.
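(Editor's note) The traceback above ends with _ensure_no_port_binding_failure() raising nova.exception.PortBindingFailed for port 8af34a72-d61f-4969-b3b5-acef24d3c087. As an illustration only, a minimal sketch and not Nova's verbatim source: the failure mode amounts to Neutron returning the port with binding:vif_type set to 'binding_failed', which Nova treats as fatal for the build. The names below (ensure_no_port_binding_failure, the stand-in PortBindingFailed class, the literal 'binding_failed' value) are assumptions made for the sketch.

    # Illustrative sketch only; mirrors the check implied by the traceback above,
    # not Nova's verbatim code. Assumes a Neutron port whose binding failed is
    # returned with binding:vif_type == 'binding_failed'.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        """Abort the build if Neutron could not bind the port."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == '__main__':
        # Port dict shaped like the one created at 742.853089 in this log,
        # assuming its binding came back as failed.
        port = {'id': '8af34a72-d61f-4969-b3b5-acef24d3c087',
                'binding:vif_type': 'binding_failed'}
        try:
            ensure_no_port_binding_failure(port)
        except PortBindingFailed as exc:
            print(exc)

In this log the same condition surfaces twice: once from the _allocate_network_async greenthread (743.788805) and again at 744.124071 when spawn() iterates the deferred network_info, after which the instance is terminated and its network deallocated.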
[ 743.791325] env[61273]: Removing descriptor: 15 [ 743.792685] env[61273]: DEBUG nova.network.neutron [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.793590] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] Expecting reply to msg a9a034b8d1c745e0873483f5a8f54844 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 743.807612] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9a034b8d1c745e0873483f5a8f54844 [ 743.988435] env[61273]: DEBUG nova.scheduler.client.report [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.992658] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg a1429b0d35ef41ea9f9431939203ac64 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 744.008917] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1429b0d35ef41ea9f9431939203ac64 [ 744.050974] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 744.091077] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 744.091575] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 744.091901] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.092270] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 744.092558] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.092840] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 744.093187] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 744.093461] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 744.093783] env[61273]: DEBUG 
nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 744.094080] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 744.094368] env[61273]: DEBUG nova.virt.hardware [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 744.095386] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99364dcc-0d45-409a-b7b3-bc4a0d0b389f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.106141] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2db6a8-d64d-4025-b8be-f1d71ad7844b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.124071] env[61273]: ERROR nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8af34a72-d61f-4969-b3b5-acef24d3c087, please check neutron logs for more information. 
[ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Traceback (most recent call last): [ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] yield resources [ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self.driver.spawn(context, instance, image_meta, [ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] vm_ref = self.build_virtual_machine(instance, [ 744.124071] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] vif_infos = vmwarevif.get_vif_info(self._session, [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] for vif in network_info: [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] return self._sync_wrapper(fn, *args, **kwargs) [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self.wait() [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self[:] = self._gt.wait() [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] return self._exit_event.wait() [ 744.124554] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 744.124554] env[61273]: ERROR 
nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] current.throw(*self._exc) [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] result = function(*args, **kwargs) [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] return func(*args, **kwargs) [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] raise e [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] nwinfo = self.network_api.allocate_for_instance( [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] created_port_ids = self._update_ports_for_instance( [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] with excutils.save_and_reraise_exception(): [ 744.125116] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self.force_reraise() [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] raise self.value [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] updated_port = self._update_port( [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] _ensure_no_port_binding_failure(port) [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] raise exception.PortBindingFailed(port_id=port['id']) [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] nova.exception.PortBindingFailed: Binding failed for port 8af34a72-d61f-4969-b3b5-acef24d3c087, please check neutron logs for more information. [ 744.125539] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] [ 744.126413] env[61273]: INFO nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Terminating instance [ 744.129110] env[61273]: DEBUG nova.network.neutron [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 744.131563] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Acquiring lock "refresh_cache-2a7d4872-4ed7-4058-bc36-b199d89a9f14" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.246986] env[61273]: INFO nova.compute.manager [-] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Took 1.03 seconds to deallocate network for instance. [ 744.249711] env[61273]: DEBUG nova.compute.claims [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 744.250032] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.263371] env[61273]: DEBUG nova.network.neutron [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.264853] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] Expecting reply to msg 85ebc55cc3764b82b1f16705a3b47399 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 744.274126] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85ebc55cc3764b82b1f16705a3b47399 [ 744.280187] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375314, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47101} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.280611] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d/1efd9cfe-3a0c-412c-aa44-3bf650d08f9d.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 744.280977] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.281427] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ed27991-3104-447c-815a-8974abc17485 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.287821] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 744.287821] env[61273]: value = "task-375315" [ 744.287821] env[61273]: _type = "Task" [ 744.287821] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.296679] env[61273]: DEBUG oslo_concurrency.lockutils [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] Releasing lock "refresh_cache-f3df4816-ef02-4ecc-a8ca-4f0eaf286218" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.297059] env[61273]: DEBUG nova.compute.manager [req-0f521bca-ee4e-4e64-8f15-e322f3c1cabf req-f8b3fe80-3d42-47f0-8907-a40d06567246 service nova] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Received event network-vif-deleted-0a0b4bd2-3d3f-440c-98f7-c40827ffa1df {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 744.297487] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.496825] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.496905] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 744.498537] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 8a6e58c66d0340f28521025a9af36519 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 744.499572] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.576s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.500946] env[61273]: INFO nova.compute.claims [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.502447] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 5c212cce36794e05984bccd6c2826b44 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 744.539217] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a6e58c66d0340f28521025a9af36519 [ 744.541726] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c212cce36794e05984bccd6c2826b44 [ 744.770762] env[61273]: DEBUG oslo_concurrency.lockutils [req-59dce922-c750-487f-bc03-348d0c6d9a0a req-e86dc352-7dad-4018-96c9-34281a977656 service nova] Releasing lock "refresh_cache-2a7d4872-4ed7-4058-bc36-b199d89a9f14" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.771176] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Acquired lock "refresh_cache-2a7d4872-4ed7-4058-bc36-b199d89a9f14" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.771314] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 744.771779] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 9c423ffb7e6b4dccb2c45f37d23fab3d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 744.778680] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c423ffb7e6b4dccb2c45f37d23fab3d [ 744.797788] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060789} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.797955] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 744.798689] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2632c29-7bcd-4fb4-8be2-672c5fbf7148 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.817626] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d/1efd9cfe-3a0c-412c-aa44-3bf650d08f9d.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 744.818131] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9e69283-a172-4765-998a-3f44573c4211 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.837904] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 744.837904] env[61273]: value = "task-375316" [ 744.837904] env[61273]: _type = "Task" [ 744.837904] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.846970] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375316, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.005898] env[61273]: DEBUG nova.compute.utils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 745.006578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg d4aa2a62c5b44880b3032799ccb9de86 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 745.016511] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 83ac8f372db74525bc1e06a9ff877365 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 745.016867] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 745.017024] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 745.023127] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4aa2a62c5b44880b3032799ccb9de86 [ 745.023722] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83ac8f372db74525bc1e06a9ff877365 [ 745.079227] env[61273]: DEBUG nova.policy [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3078a2af81b248f8b100f58ee66a5a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c06b50a7aaa742afbbd0c6fc56c3d131', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 745.287876] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 745.351823] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375316, 'name': ReconfigVM_Task, 'duration_secs': 0.269281} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.352210] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d/1efd9cfe-3a0c-412c-aa44-3bf650d08f9d.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.352863] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e12437e-892a-40d2-81ab-46eba0c649e2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.359701] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 745.359701] env[61273]: value = "task-375317" [ 745.359701] env[61273]: _type = "Task" [ 745.359701] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.371160] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375317, 'name': Rename_Task} progress is 5%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.373557] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Successfully created port: 1798125d-2352-4879-a756-ef155a7b85e5 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.388223] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.388223] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 5ff11abff4ad48b1b3758c596659dcd3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 745.398663] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ff11abff4ad48b1b3758c596659dcd3 [ 745.519558] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 745.521484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg daf7c5f24def44d3b3d0607076653b62 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 745.561687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daf7c5f24def44d3b3d0607076653b62 [ 745.776933] env[61273]: DEBUG nova.compute.manager [req-2c3dfc6d-829e-4e57-a4d0-9113f2b56ab4 req-eb61f46c-2023-4203-968d-3c9cb614effe service nova] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Received event network-vif-deleted-8af34a72-d61f-4969-b3b5-acef24d3c087 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 745.836596] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1c99a0-60a0-4db2-9a2f-4f441f9c695d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.843976] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0548999f-989c-4b67-b3b0-15f32d09c800 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.876246] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8130fc-b226-41b7-8708-7a4e89bc1842 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.884277] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375317, 'name': Rename_Task, 'duration_secs': 0.129881} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.885636] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 745.885728] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-127cc0b0-43a3-421d-8fe1-3b011ab49c53 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.887972] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14db5212-5b71-4d2d-929d-ec99d7313680 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.892288] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Releasing lock "refresh_cache-2a7d4872-4ed7-4058-bc36-b199d89a9f14" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.892674] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 745.892869] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 745.893451] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd34e0ae-535f-4f34-b018-0381fc9b2065 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.898351] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 745.898351] env[61273]: value = "task-375318" [ 745.898351] env[61273]: _type = "Task" [ 745.898351] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.906114] env[61273]: DEBUG nova.compute.provider_tree [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.906596] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 07f8c96f2a284400b53a892f81509526 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 745.914047] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07f8c96f2a284400b53a892f81509526 [ 745.922154] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10eb19a-8d5c-420d-bcdb-6d06b498f1db {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.932381] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375318, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.944962] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2a7d4872-4ed7-4058-bc36-b199d89a9f14 could not be found. [ 745.945186] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 745.945364] env[61273]: INFO nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Took 0.05 seconds to destroy the instance on the hypervisor. [ 745.945600] env[61273]: DEBUG oslo.service.loopingcall [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.946063] env[61273]: DEBUG nova.compute.manager [-] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 745.946174] env[61273]: DEBUG nova.network.neutron [-] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 745.963123] env[61273]: DEBUG nova.network.neutron [-] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 745.963608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4e5a20884b074205a850e16f7d63537f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 745.970445] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e5a20884b074205a850e16f7d63537f [ 746.027232] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg bab4e77b97fb41a494723666d0610e66 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 746.060153] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bab4e77b97fb41a494723666d0610e66 [ 746.165286] env[61273]: DEBUG nova.compute.manager [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Received event network-changed-1798125d-2352-4879-a756-ef155a7b85e5 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 746.165476] env[61273]: DEBUG nova.compute.manager [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Refreshing instance network info cache due to event network-changed-1798125d-2352-4879-a756-ef155a7b85e5. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 746.165714] env[61273]: DEBUG oslo_concurrency.lockutils [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] Acquiring lock "refresh_cache-c376b161-74f9-405a-bb86-516583a9a76f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.165815] env[61273]: DEBUG oslo_concurrency.lockutils [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] Acquired lock "refresh_cache-c376b161-74f9-405a-bb86-516583a9a76f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.165966] env[61273]: DEBUG nova.network.neutron [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Refreshing network info cache for port 1798125d-2352-4879-a756-ef155a7b85e5 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 746.166433] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] Expecting reply to msg bfb24bec020e4b7da6649fb609876d81 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 746.172945] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfb24bec020e4b7da6649fb609876d81 [ 746.393571] env[61273]: ERROR nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1798125d-2352-4879-a756-ef155a7b85e5, please check neutron logs for more information. 
[ 746.393571] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 746.393571] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 746.393571] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 746.393571] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 746.393571] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 746.393571] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 746.393571] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 746.393571] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 746.393571] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 746.393571] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 746.393571] env[61273]: ERROR nova.compute.manager raise self.value [ 746.393571] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 746.393571] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 746.393571] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 746.393571] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 746.394208] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 746.394208] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 746.394208] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1798125d-2352-4879-a756-ef155a7b85e5, please check neutron logs for more information. 
[ 746.394208] env[61273]: ERROR nova.compute.manager [ 746.394208] env[61273]: Traceback (most recent call last): [ 746.394208] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 746.394208] env[61273]: listener.cb(fileno) [ 746.394208] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 746.394208] env[61273]: result = function(*args, **kwargs) [ 746.394208] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 746.394208] env[61273]: return func(*args, **kwargs) [ 746.394208] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 746.394208] env[61273]: raise e [ 746.394208] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 746.394208] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 746.394208] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 746.394208] env[61273]: created_port_ids = self._update_ports_for_instance( [ 746.394208] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 746.394208] env[61273]: with excutils.save_and_reraise_exception(): [ 746.394208] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 746.394208] env[61273]: self.force_reraise() [ 746.394208] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 746.394208] env[61273]: raise self.value [ 746.394208] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 746.394208] env[61273]: updated_port = self._update_port( [ 746.394208] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 746.394208] env[61273]: _ensure_no_port_binding_failure(port) [ 746.394208] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 746.394208] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 746.395225] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 1798125d-2352-4879-a756-ef155a7b85e5, please check neutron logs for more information. 
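The traceback above shows the path by which a failed Neutron port binding surfaces in nova-compute: allocate_for_instance() -> _update_ports_for_instance() -> _update_port() -> _ensure_no_port_binding_failure(), which raises nova.exception.PortBindingFailed for the port id. The following is a minimal, self-contained sketch of that final check only, not the actual Nova code; the VIF_TYPE_BINDING_FAILED value and the shape of the port dict are assumptions made for illustration.

    # Sketch of the check implied by the traceback; assumptions are noted inline.
    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed (sketch only)."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    # Assumed value Neutron sets on binding:vif_type when binding fails.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        # If the port's binding failed on the Neutron side, turn it into the
        # exception seen throughout this log.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example port dict (hypothetical layout) for the port id from this log.
    port = {'id': '1798125d-2352-4879-a756-ef155a7b85e5',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # mirrors the "Binding failed for port ..." message above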
[ 746.395225] env[61273]: Removing descriptor: 19 [ 746.415677] env[61273]: DEBUG nova.scheduler.client.report [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 746.418154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg c97144036f804cd59c4e34cbad62640d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 746.419049] env[61273]: DEBUG oslo_vmware.api [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375318, 'name': PowerOnVM_Task, 'duration_secs': 0.475023} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.424032] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 746.424032] env[61273]: DEBUG nova.compute.manager [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 746.424032] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80714ecb-ac0c-4e0f-bbce-8c154b116826 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.430467] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 134096f5f7f342f6a1e192cc579fb148 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 746.437005] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c97144036f804cd59c4e34cbad62640d [ 746.465310] env[61273]: DEBUG nova.network.neutron [-] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.465896] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 479a402c258b4b88b183edc2b4becf17 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 746.468933] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 134096f5f7f342f6a1e192cc579fb148 [ 746.474357] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
479a402c258b4b88b183edc2b4becf17 [ 746.530832] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 746.555741] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 746.555989] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 746.556166] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.556347] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 746.556489] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.556634] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 746.556939] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 746.557129] env[61273]: DEBUG nova.virt.hardware [None 
req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 746.557297] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 746.557455] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 746.557623] env[61273]: DEBUG nova.virt.hardware [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 746.558500] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1736b9d8-bdc9-4372-9005-2fca00362cac {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.566220] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fe6ae9-744a-4d02-8be5-4e6d51979ee4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.580190] env[61273]: ERROR nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1798125d-2352-4879-a756-ef155a7b85e5, please check neutron logs for more information. 
[ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] Traceback (most recent call last): [ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] yield resources [ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self.driver.spawn(context, instance, image_meta, [ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] vm_ref = self.build_virtual_machine(instance, [ 746.580190] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] vif_infos = vmwarevif.get_vif_info(self._session, [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] for vif in network_info: [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] return self._sync_wrapper(fn, *args, **kwargs) [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self.wait() [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self[:] = self._gt.wait() [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] return self._exit_event.wait() [ 746.580630] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 746.580630] env[61273]: ERROR 
nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] current.throw(*self._exc) [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] result = function(*args, **kwargs) [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] return func(*args, **kwargs) [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] raise e [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] nwinfo = self.network_api.allocate_for_instance( [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] created_port_ids = self._update_ports_for_instance( [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] with excutils.save_and_reraise_exception(): [ 746.581119] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self.force_reraise() [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] raise self.value [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] updated_port = self._update_port( [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] _ensure_no_port_binding_failure(port) [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] raise exception.PortBindingFailed(port_id=port['id']) [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] nova.exception.PortBindingFailed: Binding failed for port 1798125d-2352-4879-a756-ef155a7b85e5, please check neutron logs for more information. [ 746.581571] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] [ 746.581571] env[61273]: INFO nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Terminating instance [ 746.582475] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-c376b161-74f9-405a-bb86-516583a9a76f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.681555] env[61273]: DEBUG nova.network.neutron [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 746.753842] env[61273]: DEBUG nova.network.neutron [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.754621] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] Expecting reply to msg 675f9e53bb744403ab84a56353ac85aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 746.764770] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 675f9e53bb744403ab84a56353ac85aa [ 746.920602] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.921129] env[61273]: DEBUG nova.compute.manager [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 746.923507] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg e36410b330f8402b9c8ecba2c8caa2b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 746.924047] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.128s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.925724] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 3d73597afcc64b099f7bd4296f2fa18a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 746.945442] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.960457] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e36410b330f8402b9c8ecba2c8caa2b2 [ 746.969219] env[61273]: INFO nova.compute.manager [-] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Took 1.02 seconds to deallocate network for instance. 
[ 746.970279] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d73597afcc64b099f7bd4296f2fa18a [ 746.976813] env[61273]: DEBUG nova.compute.claims [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 746.977006] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.257505] env[61273]: DEBUG oslo_concurrency.lockutils [req-4cbe52fa-6e54-42d0-8223-c4156208208f req-0797a823-b3a4-40da-b101-eb98da1381e3 service nova] Releasing lock "refresh_cache-c376b161-74f9-405a-bb86-516583a9a76f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.257929] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-c376b161-74f9-405a-bb86-516583a9a76f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.258125] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 747.258566] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg f2c82ca953214469a6c930f507290316 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 747.265464] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2c82ca953214469a6c930f507290316 [ 747.428696] env[61273]: DEBUG nova.compute.utils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 747.429349] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg d1dffc34f3cd45649008659f29b3b5e5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 747.430305] env[61273]: DEBUG nova.compute.manager [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Not allocating networking since 'none' was specified. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 747.441844] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1dffc34f3cd45649008659f29b3b5e5 [ 747.674199] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 6863606250c44f499e6cc2678728ebc1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 747.685504] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6863606250c44f499e6cc2678728ebc1 [ 747.716180] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f0a743-2a55-42ab-b33b-6b11e7def881 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.723818] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd5bd01-5155-4291-a6ae-4fc6971fc941 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.757564] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f0f2bc-0aca-4d5f-a6b9-0736c62c8e9f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.767119] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84015e1-bb9f-490b-ac09-453f85c263a7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.787043] env[61273]: DEBUG nova.compute.provider_tree [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.787573] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 29a8b9aae49e4443bbd5ffbed6386c0c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 747.794579] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29a8b9aae49e4443bbd5ffbed6386c0c [ 747.799116] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 747.913932] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.914454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg f3729fe883094911b8f025def86c1f4d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 747.922745] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3729fe883094911b8f025def86c1f4d [ 747.931919] env[61273]: DEBUG nova.compute.manager [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 747.933692] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg f0cd872662754ec69ae3da1f0dff73c2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 747.966632] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0cd872662754ec69ae3da1f0dff73c2 [ 748.178497] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "1efd9cfe-3a0c-412c-aa44-3bf650d08f9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.178789] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "1efd9cfe-3a0c-412c-aa44-3bf650d08f9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.179005] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "1efd9cfe-3a0c-412c-aa44-3bf650d08f9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.179187] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "1efd9cfe-3a0c-412c-aa44-3bf650d08f9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.179356] 
env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "1efd9cfe-3a0c-412c-aa44-3bf650d08f9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.181684] env[61273]: INFO nova.compute.manager [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Terminating instance [ 748.183305] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "refresh_cache-1efd9cfe-3a0c-412c-aa44-3bf650d08f9d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.183464] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired lock "refresh_cache-1efd9cfe-3a0c-412c-aa44-3bf650d08f9d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.183627] env[61273]: DEBUG nova.network.neutron [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.184052] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 9b838e16fbf34483b7167a4d1b4c1a0f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 748.190205] env[61273]: DEBUG nova.compute.manager [req-80af0e02-90e3-4ea1-a44a-9a68fa03de13 req-9fd9155a-60a5-4a51-ad0b-1a7e14b586de service nova] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Received event network-vif-deleted-1798125d-2352-4879-a756-ef155a7b85e5 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 748.191693] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b838e16fbf34483b7167a4d1b4c1a0f [ 748.290407] env[61273]: DEBUG nova.scheduler.client.report [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 748.293088] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 
tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 14e1941ee0624a36a5617b5d89c7fe89 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 748.305118] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14e1941ee0624a36a5617b5d89c7fe89 [ 748.417187] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-c376b161-74f9-405a-bb86-516583a9a76f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.417618] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 748.417810] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 748.418130] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ec7497b-28ef-4a9d-a064-bebb87071b7f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.426943] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee56344-3a9b-420f-9637-b5e8f2115b80 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.439265] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 487c47a45b364ea28b20fa4ec0e115b6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 748.452803] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c376b161-74f9-405a-bb86-516583a9a76f could not be found. [ 748.453027] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 748.453211] env[61273]: INFO nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 748.453429] env[61273]: DEBUG oslo.service.loopingcall [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.453658] env[61273]: DEBUG nova.compute.manager [-] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 748.453771] env[61273]: DEBUG nova.network.neutron [-] [instance: c376b161-74f9-405a-bb86-516583a9a76f] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 748.468538] env[61273]: DEBUG nova.network.neutron [-] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.469040] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dfddbe02da2f452b85c0bc80d876c1f4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 748.470108] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 487c47a45b364ea28b20fa4ec0e115b6 [ 748.476317] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfddbe02da2f452b85c0bc80d876c1f4 [ 748.702391] env[61273]: DEBUG nova.network.neutron [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.754516] env[61273]: DEBUG nova.network.neutron [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.755051] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 55b18785a6cd431195c3e1cc693a261e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 748.764903] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55b18785a6cd431195c3e1cc693a261e [ 748.795600] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.871s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.796458] env[61273]: ERROR nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 70e066e6-557a-491a-9f73-a77f76256833, please check neutron logs for more information. 
[ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Traceback (most recent call last): [ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self.driver.spawn(context, instance, image_meta, [ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] vm_ref = self.build_virtual_machine(instance, [ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.796458] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] for vif in network_info: [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] return self._sync_wrapper(fn, *args, **kwargs) [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self.wait() [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self[:] = self._gt.wait() [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] return self._exit_event.wait() [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] current.throw(*self._exc) [ 748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
748.796934] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] result = function(*args, **kwargs) [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] return func(*args, **kwargs) [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] raise e [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] nwinfo = self.network_api.allocate_for_instance( [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] created_port_ids = self._update_ports_for_instance( [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] with excutils.save_and_reraise_exception(): [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] self.force_reraise() [ 748.797386] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.797845] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] raise self.value [ 748.797845] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.797845] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] updated_port = self._update_port( [ 748.797845] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.797845] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] _ensure_no_port_binding_failure(port) [ 748.797845] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.797845] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] raise exception.PortBindingFailed(port_id=port['id']) [ 748.797845] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] nova.exception.PortBindingFailed: Binding failed for 
port 70e066e6-557a-491a-9f73-a77f76256833, please check neutron logs for more information. [ 748.797845] env[61273]: ERROR nova.compute.manager [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] [ 748.798149] env[61273]: DEBUG nova.compute.utils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Binding failed for port 70e066e6-557a-491a-9f73-a77f76256833, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 748.799288] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.113s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.799460] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.799707] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61273) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 748.799881] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.943s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.801743] env[61273]: INFO nova.compute.claims [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.803248] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg fa54eccaa4fb47adac011edc4d349b4a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 748.804723] env[61273]: DEBUG nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Build of instance b6a158f8-6e2a-4967-ad05-761804ec6590 was re-scheduled: Binding failed for port 70e066e6-557a-491a-9f73-a77f76256833, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 748.805167] env[61273]: DEBUG nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 748.805390] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Acquiring lock "refresh_cache-b6a158f8-6e2a-4967-ad05-761804ec6590" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.805535] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Acquired lock "refresh_cache-b6a158f8-6e2a-4967-ad05-761804ec6590" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.805693] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.806063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 1b1ca4989d834a85a10f18e6768e9cf2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 748.807273] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e750e856-04fc-4f6b-9927-9fa3abb63f07 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.812178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b1ca4989d834a85a10f18e6768e9cf2 [ 748.816045] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d364e08f-abe0-424a-91b9-9c6d64bb426f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.829937] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bae0d51-074b-4205-add9-01048e7b2d3d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.836640] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d1c071-1c22-4910-ba3d-6448835095ef {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.867337] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa54eccaa4fb47adac011edc4d349b4a [ 748.867879] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181708MB free_disk=141GB free_vcpus=48 
pci_devices=None {{(pid=61273) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 748.868037] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.942101] env[61273]: DEBUG nova.compute.manager [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 748.966720] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 748.966953] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 748.967103] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.967275] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 748.967411] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.967548] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 748.967746] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d 
tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 748.967896] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 748.968185] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 748.968309] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 748.968393] env[61273]: DEBUG nova.virt.hardware [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 748.969252] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee30eeda-d8cc-4a5c-8321-c2d0132989fb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.972316] env[61273]: DEBUG nova.network.neutron [-] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.972826] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b68d262a6de748e1b1d86775808154be in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 748.979217] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6658a9e-a850-4c30-ba29-4ba1a95cef9d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.983312] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b68d262a6de748e1b1d86775808154be [ 748.993851] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.999813] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Creating folder: Project (e827316ff2e24c8da010c8f9def69e17). Parent ref: group-v103328. 
{{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 749.000428] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6758166-6ae8-4829-bd84-26b34dc0d8f8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.009533] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Created folder: Project (e827316ff2e24c8da010c8f9def69e17) in parent group-v103328. [ 749.009688] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Creating folder: Instances. Parent ref: group-v103353. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 749.009919] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69c26791-2ca9-4e46-a9b5-6568c4b3fd46 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.018332] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Created folder: Instances in parent group-v103353. [ 749.018551] env[61273]: DEBUG oslo.service.loopingcall [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.018732] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 749.018927] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc2b3661-b6e0-45bc-8a8f-6a607da60357 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.035280] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 749.035280] env[61273]: value = "task-375321" [ 749.035280] env[61273]: _type = "Task" [ 749.035280] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.044145] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375321, 'name': CreateVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.257875] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Releasing lock "refresh_cache-1efd9cfe-3a0c-412c-aa44-3bf650d08f9d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.258284] env[61273]: DEBUG nova.compute.manager [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 749.258491] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 749.259774] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6b101c-440f-47ba-80cc-2c90c86ecd31 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.267526] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 749.267774] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4162cecb-e2bc-427c-884d-7a55a61dfe2a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.272963] env[61273]: DEBUG oslo_vmware.api [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 749.272963] env[61273]: value = "task-375322" [ 749.272963] env[61273]: _type = "Task" [ 749.272963] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.282956] env[61273]: DEBUG oslo_vmware.api [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.313977] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 2e1ae4a2459c4160913ddcca9a40b728 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 749.327814] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e1ae4a2459c4160913ddcca9a40b728 [ 749.328934] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.407400] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.408078] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg fc878e855b6d42299e68f0cbb7f7f71d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 749.416712] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc878e855b6d42299e68f0cbb7f7f71d [ 749.474690] env[61273]: INFO nova.compute.manager [-] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Took 1.02 seconds to deallocate network for instance. [ 749.476998] env[61273]: DEBUG nova.compute.claims [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 749.477177] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.546204] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375321, 'name': CreateVM_Task, 'duration_secs': 0.259475} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.546388] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 749.546829] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.547013] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.547332] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 749.547875] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42cb130d-0b23-4f08-b3da-727033b14463 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.553000] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 749.553000] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]5275d84f-f681-8fc2-61da-71c70789f128" [ 749.553000] env[61273]: _type = "Task" [ 749.553000] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.562188] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5275d84f-f681-8fc2-61da-71c70789f128, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.782423] env[61273]: DEBUG oslo_vmware.api [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375322, 'name': PowerOffVM_Task, 'duration_secs': 0.174232} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.782708] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 749.782887] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 749.783131] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fdf4ca0-60fa-4898-8eae-1e5ee3cb2c2e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.806545] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 749.806731] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Deleting contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 749.806910] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Deleting the datastore file [datastore2] 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 749.807163] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fa9838a-29cc-45a5-a9e7-75ffb95468c7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.813327] env[61273]: DEBUG oslo_vmware.api [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 749.813327] env[61273]: value = "task-375324" [ 749.813327] env[61273]: _type = "Task" [ 749.813327] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.820542] env[61273]: DEBUG oslo_vmware.api [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375324, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.910577] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Releasing lock "refresh_cache-b6a158f8-6e2a-4967-ad05-761804ec6590" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.910854] env[61273]: DEBUG nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 749.910947] env[61273]: DEBUG nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 749.911083] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 749.947101] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.947655] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 68ae924f26d1490eb36eee9877af3ed7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 749.954145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68ae924f26d1490eb36eee9877af3ed7 [ 750.064894] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5275d84f-f681-8fc2-61da-71c70789f128, 'name': SearchDatastore_Task, 'duration_secs': 0.010559} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.065194] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.065408] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.065639] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.065803] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.065995] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.066245] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90f29e62-84ca-4f5e-8e1e-7453b9058513 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.073675] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.073888] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 750.076781] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0e08448-76f7-44b3-8230-a963eae99d79 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.082773] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 750.082773] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]5214f901-3891-84c4-ac2b-545662009c1a" [ 750.082773] env[61273]: _type = "Task" [ 750.082773] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.090363] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5214f901-3891-84c4-ac2b-545662009c1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.125417] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1d3327-38d9-4492-8bef-c884d650e0af {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.132603] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3b2f9a-6776-4368-9c24-d3e931cb7195 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.161576] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c6c334-d5da-435a-8504-7d2f2d57b0a6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.168611] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de6265c-65fe-4989-bbbd-8bf62c9c04f4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.182863] env[61273]: DEBUG nova.compute.provider_tree [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.183310] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg e252ef6f53a24040aeae9918e697b779 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 750.192422] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e252ef6f53a24040aeae9918e697b779 [ 750.323683] env[61273]: DEBUG oslo_vmware.api [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09731} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.323969] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.324164] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Deleted contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 750.324340] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 750.324511] env[61273]: INFO nova.compute.manager [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Took 1.07 seconds to destroy the instance on the hypervisor. [ 750.324788] env[61273]: DEBUG oslo.service.loopingcall [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.324970] env[61273]: DEBUG nova.compute.manager [-] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 750.325064] env[61273]: DEBUG nova.network.neutron [-] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 750.340227] env[61273]: DEBUG nova.network.neutron [-] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 750.340728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 65a2bf47d3fc4869802e6f648658b1cd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 750.347720] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65a2bf47d3fc4869802e6f648658b1cd [ 750.454591] env[61273]: DEBUG nova.network.neutron [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.455184] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 2b83763b64754433a79e163659bf6fb9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 750.464618] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b83763b64754433a79e163659bf6fb9 [ 750.600547] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5214f901-3891-84c4-ac2b-545662009c1a, 'name': SearchDatastore_Task, 'duration_secs': 0.008485} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.601319] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d9c2bc5-eca2-4022-9d85-6285bb03c454 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.606484] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 750.606484] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52e6c842-172b-0b05-057d-c63f4cf30348" [ 750.606484] env[61273]: _type = "Task" [ 750.606484] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.613886] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52e6c842-172b-0b05-057d-c63f4cf30348, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.686136] env[61273]: DEBUG nova.scheduler.client.report [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 750.688736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 982b2ab3335b4f00be1bd39de27c8cbf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 750.698753] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 982b2ab3335b4f00be1bd39de27c8cbf [ 750.842918] env[61273]: DEBUG nova.network.neutron [-] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.843390] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2c170d700c4a4ff4827ae9eab7893a4c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 750.852343] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c170d700c4a4ff4827ae9eab7893a4c [ 750.957390] env[61273]: INFO nova.compute.manager [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] [instance: b6a158f8-6e2a-4967-ad05-761804ec6590] Took 1.05 seconds to deallocate network for instance. [ 750.959196] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 95601f5975c64aaab31c1b5e3b97fe81 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 751.000632] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95601f5975c64aaab31c1b5e3b97fe81 [ 751.117076] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52e6c842-172b-0b05-057d-c63f4cf30348, 'name': SearchDatastore_Task, 'duration_secs': 0.008762} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.117307] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.117558] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc/f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 751.117810] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-124cd68c-20a6-407b-9722-f7d56143da06 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.124189] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 751.124189] env[61273]: value = "task-375325" [ 751.124189] env[61273]: _type = "Task" [ 751.124189] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.131426] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.191140] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.191793] env[61273]: DEBUG nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 751.193640] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg be504efcb2184d1a97586da53ba121f1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 751.194778] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.141s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.196162] env[61273]: INFO nova.compute.claims [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.197616] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 4e86988e69ad4374a1b60f1153de7758 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 751.225643] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be504efcb2184d1a97586da53ba121f1 [ 751.244569] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e86988e69ad4374a1b60f1153de7758 [ 751.345982] env[61273]: INFO nova.compute.manager [-] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Took 1.02 seconds to deallocate network for instance. [ 751.350187] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 7725f31190a74a49856229eaae50a2c1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 751.381599] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7725f31190a74a49856229eaae50a2c1 [ 751.463883] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 78c429b606594c8db0e18788c1e8d328 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 751.508109] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78c429b606594c8db0e18788c1e8d328 [ 751.635771] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474707} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.636045] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc/f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 751.636271] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.636523] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e446f39-3b2b-446d-8243-8a0f4fa7102c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.642956] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 751.642956] env[61273]: value = "task-375326" [ 751.642956] env[61273]: _type = "Task" [ 751.642956] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.651785] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375326, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.700760] env[61273]: DEBUG nova.compute.utils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 751.701513] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 6731a2aed2104723ac1417cf6061644e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 751.702508] env[61273]: DEBUG nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 751.702688] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 751.709569] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 84003f6240fb441ebbbf76280a93e72b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 751.712665] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6731a2aed2104723ac1417cf6061644e [ 751.716658] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84003f6240fb441ebbbf76280a93e72b [ 751.852755] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.931275] env[61273]: DEBUG nova.policy [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4030d1d160204c7db3ef8c948ca18168', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '060b316170404510b74c98245e0dc553', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 751.992881] env[61273]: INFO nova.scheduler.client.report [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Deleted allocations for instance b6a158f8-6e2a-4967-ad05-761804ec6590 [ 751.999314] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Expecting reply to msg 18f333bc3d0946c099bf013605d705ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 752.019504] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18f333bc3d0946c099bf013605d705ba [ 752.153463] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375326, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065148} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.153750] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 752.154549] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae028918-6dc7-4cbb-9df0-aaa6a4940628 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.174046] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc/f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.174315] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-102b6ca3-2fb5-4930-ae73-2896cd792c74 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.193780] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 752.193780] env[61273]: value = "task-375327" [ 752.193780] env[61273]: _type = "Task" [ 752.193780] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.201565] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375327, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.206190] env[61273]: DEBUG nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 752.207829] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 4bf724f40c2d44ba8ed0ed800816dbf9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 752.249832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bf724f40c2d44ba8ed0ed800816dbf9 [ 752.356856] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Successfully created port: b17ade9b-9979-4941-8442-f0ef91d65a14 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 752.507290] env[61273]: DEBUG oslo_concurrency.lockutils [None req-45f175ce-162c-47d2-9198-e74d4e7e13b3 tempest-InstanceActionsV221TestJSON-446942723 tempest-InstanceActionsV221TestJSON-446942723-project-member] Lock "b6a158f8-6e2a-4967-ad05-761804ec6590" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.616s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.507290] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 3636761f39b044b882ad8df306d942b8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 752.518538] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3636761f39b044b882ad8df306d942b8 [ 752.536701] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f0d10e-3f41-4c8a-9a56-7b398967900b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.545320] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6053427b-5e92-4472-bf56-4ee6aaea1a87 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.579012] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b7f952-af11-45d8-97d9-d8c28fa85f2d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.586358] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f324b387-3b25-4d2b-8251-768a96cbf1e9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.600374] env[61273]: DEBUG nova.compute.provider_tree [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.601068] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 
tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg c1076553b43140708689da743e2fae15 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 752.608579] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1076553b43140708689da743e2fae15 [ 752.704078] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375327, 'name': ReconfigVM_Task, 'duration_secs': 0.349868} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.704597] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Reconfigured VM instance instance-00000032 to attach disk [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc/f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.705395] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91eb9431-a5b3-443f-bbce-7bd1bbf5f579 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.711845] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg c4b6ae7556a145c88a84338049a3c886 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 752.714107] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 752.714107] env[61273]: value = "task-375328" [ 752.714107] env[61273]: _type = "Task" [ 752.714107] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.731937] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375328, 'name': Rename_Task} progress is 5%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.747302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4b6ae7556a145c88a84338049a3c886 [ 753.012574] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 753.012574] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 8f7ba9ef027b460f870dd7459fb204ee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 753.056808] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f7ba9ef027b460f870dd7459fb204ee [ 753.103778] env[61273]: DEBUG nova.scheduler.client.report [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 753.106370] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg eec2ec00b3a24a3d802d02d489b85ea6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 753.121160] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eec2ec00b3a24a3d802d02d489b85ea6 [ 753.216745] env[61273]: DEBUG nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 753.228686] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375328, 'name': Rename_Task, 'duration_secs': 0.127796} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.228973] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 753.229220] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2489867f-e37a-4aac-8545-fe30019861df {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.241637] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 753.241637] env[61273]: value = "task-375329" [ 753.241637] env[61273]: _type = "Task" [ 753.241637] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.247133] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 753.247354] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 753.247502] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 753.247693] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 753.247895] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 753.248085] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 753.248294] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 753.248449] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 753.248604] env[61273]: DEBUG nova.virt.hardware 
[None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 753.248756] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 753.248921] env[61273]: DEBUG nova.virt.hardware [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 753.249688] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3cff14-1bd9-4b5c-90cc-80fdf73f270a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.256210] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375329, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.261501] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecce445-58a1-4145-a1c5-96f1d200be44 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.267385] env[61273]: DEBUG nova.compute.manager [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Received event network-changed-b17ade9b-9979-4941-8442-f0ef91d65a14 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 753.267646] env[61273]: DEBUG nova.compute.manager [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Refreshing instance network info cache due to event network-changed-b17ade9b-9979-4941-8442-f0ef91d65a14. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 753.267766] env[61273]: DEBUG oslo_concurrency.lockutils [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] Acquiring lock "refresh_cache-2cd160c6-98ac-44a7-831e-d0fa3a958b99" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.267903] env[61273]: DEBUG oslo_concurrency.lockutils [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] Acquired lock "refresh_cache-2cd160c6-98ac-44a7-831e-d0fa3a958b99" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.268071] env[61273]: DEBUG nova.network.neutron [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Refreshing network info cache for port b17ade9b-9979-4941-8442-f0ef91d65a14 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 753.268536] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] Expecting reply to msg c778d977d3fa4cc8805b06b2a0e1e574 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 753.279189] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c778d977d3fa4cc8805b06b2a0e1e574 [ 753.533813] env[61273]: ERROR nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b17ade9b-9979-4941-8442-f0ef91d65a14, please check neutron logs for more information. 
[ 753.533813] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 753.533813] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 753.533813] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 753.533813] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 753.533813] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 753.533813] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 753.533813] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 753.533813] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.533813] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 753.533813] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.533813] env[61273]: ERROR nova.compute.manager raise self.value [ 753.533813] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 753.533813] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 753.533813] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.533813] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 753.534194] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.534194] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 753.534194] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b17ade9b-9979-4941-8442-f0ef91d65a14, please check neutron logs for more information. 
[ 753.534194] env[61273]: ERROR nova.compute.manager [ 753.534194] env[61273]: Traceback (most recent call last): [ 753.534194] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 753.534194] env[61273]: listener.cb(fileno) [ 753.534194] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 753.534194] env[61273]: result = function(*args, **kwargs) [ 753.534194] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 753.534194] env[61273]: return func(*args, **kwargs) [ 753.534194] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 753.534194] env[61273]: raise e [ 753.534194] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 753.534194] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 753.534194] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 753.534194] env[61273]: created_port_ids = self._update_ports_for_instance( [ 753.534194] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 753.534194] env[61273]: with excutils.save_and_reraise_exception(): [ 753.534194] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.534194] env[61273]: self.force_reraise() [ 753.534194] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.534194] env[61273]: raise self.value [ 753.534194] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 753.534194] env[61273]: updated_port = self._update_port( [ 753.534194] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.534194] env[61273]: _ensure_no_port_binding_failure(port) [ 753.534194] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.534194] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 753.534706] env[61273]: nova.exception.PortBindingFailed: Binding failed for port b17ade9b-9979-4941-8442-f0ef91d65a14, please check neutron logs for more information. [ 753.534706] env[61273]: Removing descriptor: 19 [ 753.534706] env[61273]: ERROR nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b17ade9b-9979-4941-8442-f0ef91d65a14, please check neutron logs for more information. 
[ 753.534706] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Traceback (most recent call last): [ 753.534706] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 753.534706] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] yield resources [ 753.534706] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 753.534706] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self.driver.spawn(context, instance, image_meta, [ 753.534706] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 753.534706] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 753.534706] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 753.534706] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] vm_ref = self.build_virtual_machine(instance, [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] vif_infos = vmwarevif.get_vif_info(self._session, [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] for vif in network_info: [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] return self._sync_wrapper(fn, *args, **kwargs) [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self.wait() [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self[:] = self._gt.wait() [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] return self._exit_event.wait() [ 753.534929] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 753.535182] env[61273]: ERROR 
nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] result = hub.switch() [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] return self.greenlet.switch() [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] result = function(*args, **kwargs) [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] return func(*args, **kwargs) [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] raise e [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] nwinfo = self.network_api.allocate_for_instance( [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 753.535182] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] created_port_ids = self._update_ports_for_instance( [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] with excutils.save_and_reraise_exception(): [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self.force_reraise() [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] raise self.value [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] updated_port = self._update_port( [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.535448] 
env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] _ensure_no_port_binding_failure(port) [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.535448] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] raise exception.PortBindingFailed(port_id=port['id']) [ 753.535677] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] nova.exception.PortBindingFailed: Binding failed for port b17ade9b-9979-4941-8442-f0ef91d65a14, please check neutron logs for more information. [ 753.535677] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] [ 753.535677] env[61273]: INFO nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Terminating instance [ 753.537330] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.537818] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Acquiring lock "refresh_cache-2cd160c6-98ac-44a7-831e-d0fa3a958b99" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.609680] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.610214] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 753.611936] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg d3c49c24ff944f8491c8a68d10c4ef11 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 753.613000] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.362s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.614328] env[61273]: INFO nova.compute.claims [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.615814] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 08490e770a9a4e9a95761f83ca85f4dd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 753.652664] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3c49c24ff944f8491c8a68d10c4ef11 [ 753.668457] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08490e770a9a4e9a95761f83ca85f4dd [ 753.753078] env[61273]: DEBUG oslo_vmware.api [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375329, 'name': PowerOnVM_Task, 'duration_secs': 0.42767} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.753539] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 753.753850] env[61273]: INFO nova.compute.manager [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Took 4.81 seconds to spawn the instance on the hypervisor. 
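The PortBindingFailed traceback above ends in _ensure_no_port_binding_failure() raising the exception for port b17ade9b-9979-4941-8442-f0ef91d65a14. Below is a minimal, self-contained sketch of that check, for orientation only; it is not the Nova source. It assumes the usual Neutron behaviour of marking a port it could not bind with binding:vif_type = 'binding_failed', which the caller then converts into the exception whose message recurs throughout this log.

    # Illustrative sketch, not nova/network/neutron.py itself.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")


    def ensure_no_port_binding_failure(port):
        # Neutron reports a port it failed to bind with
        # binding:vif_type == 'binding_failed'; turn that into an exception
        # so instance build aborts instead of spawning without networking.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    # Example with the port id from the traceback above:
    try:
        ensure_no_port_binding_failure(
            {'id': 'b17ade9b-9979-4941-8442-f0ef91d65a14',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)

When the check raises, _allocate_network_async re-raises the exception into the build path, which is why the same PortBindingFailed message appears once for the network allocation and again for the failed spawn before the instance is terminated.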
[ 753.754145] env[61273]: DEBUG nova.compute.manager [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 753.755175] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0384d546-f787-484a-8cfa-561cf49e7755 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.764040] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 42a3cd49792f448aa24926bfb8b7c476 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 753.805816] env[61273]: DEBUG nova.network.neutron [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 753.810776] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42a3cd49792f448aa24926bfb8b7c476 [ 753.935820] env[61273]: DEBUG nova.network.neutron [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.936417] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] Expecting reply to msg 29c2845eec05456b8adf6b87327c2444 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 753.946702] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29c2845eec05456b8adf6b87327c2444 [ 754.119325] env[61273]: DEBUG nova.compute.utils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 754.120041] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 77ad8e710f5b4f7895fc353a81709f08 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 754.122458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg e682a4547e6748fda6bf3ee0fb4dc67d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 754.123561] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 754.123877] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 754.130048] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e682a4547e6748fda6bf3ee0fb4dc67d [ 754.134287] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77ad8e710f5b4f7895fc353a81709f08 [ 754.186132] env[61273]: DEBUG nova.policy [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f09e88112924400db81a4fbe611482f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70c9e6d7debd4d8e8cb7790975294a22', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 754.274605] env[61273]: INFO nova.compute.manager [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Took 26.37 seconds to build instance. [ 754.275056] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 8f4dae08bbd14ed6b91b0d8b9b602e65 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 754.304651] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f4dae08bbd14ed6b91b0d8b9b602e65 [ 754.440389] env[61273]: DEBUG oslo_concurrency.lockutils [req-b5597d43-6ff1-4c63-938b-dffcc704af91 req-03b021d5-6bc6-4181-a24e-59d15088ffac service nova] Releasing lock "refresh_cache-2cd160c6-98ac-44a7-831e-d0fa3a958b99" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.440786] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Acquired lock "refresh_cache-2cd160c6-98ac-44a7-831e-d0fa3a958b99" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.440977] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 754.441405] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 7530eb7ceeb84fdcb81561dbf04c24aa in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 754.448477] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7530eb7ceeb84fdcb81561dbf04c24aa [ 754.558726] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Successfully created port: 6d97bb17-557c-40e8-91c4-7a67086718c1 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.624915] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 754.626879] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg fe1c88d074784d00a6017843a6523f52 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 754.666635] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe1c88d074784d00a6017843a6523f52 [ 754.746816] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 5df47ff64bce49dd9f215c21e4ec554c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 754.759564] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5df47ff64bce49dd9f215c21e4ec554c [ 754.777196] env[61273]: DEBUG oslo_concurrency.lockutils [None req-4f38619e-3871-4328-9efd-61cd5d12c60d tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.627s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.777839] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 9bd7ea1188e347129c6a2e64478641dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 754.795784] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bd7ea1188e347129c6a2e64478641dc [ 754.952943] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3f9fd9-55f6-44f2-8506-b5768583d33a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.960585] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb07bf9-4146-4e18-a9f6-8b1843b81aa6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.964091] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Instance 
cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 754.993611] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc88f2b7-b1fc-4913-a8e9-cb4ffa4f523c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.000882] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b338787a-4db4-4e8d-acf1-26326eecb0bd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.014372] env[61273]: DEBUG nova.compute.provider_tree [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.014798] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 2e3a3f46b3874bf1b182d84a43c09aa7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 755.022963] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e3a3f46b3874bf1b182d84a43c09aa7 [ 755.062934] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.062934] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg a9fdc81e5ef94d4c89b69ca2fc32f823 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 755.070964] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9fdc81e5ef94d4c89b69ca2fc32f823 [ 755.134074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg b2ac0cfb5721409c9cd6d9d1c713ea4b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 755.177165] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2ac0cfb5721409c9cd6d9d1c713ea4b [ 755.250337] env[61273]: INFO nova.compute.manager [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Rebuilding instance [ 755.279629] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 755.282101] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 13fad981ab004b9ea3430dab51c16c10 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 755.293089] env[61273]: DEBUG nova.compute.manager [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 755.294051] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119b350d-a31f-4faf-a246-385e41f52b35 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.301935] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg d94d23bc6010422699c02210cae557ad in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 755.317294] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13fad981ab004b9ea3430dab51c16c10 [ 755.334450] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d94d23bc6010422699c02210cae557ad [ 755.340360] env[61273]: DEBUG nova.compute.manager [req-08ce8e1e-fccc-4044-917d-5d023d4a4917 req-c3938bc0-daa4-4d45-92a0-7e65e643ca40 service nova] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Received event network-vif-deleted-b17ade9b-9979-4941-8442-f0ef91d65a14 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 755.523035] env[61273]: DEBUG nova.scheduler.client.report [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 755.523035] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 45e96c39d6b44ad18914c7639dbceeec in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 755.537094] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45e96c39d6b44ad18914c7639dbceeec [ 755.564341] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Releasing lock "refresh_cache-2cd160c6-98ac-44a7-831e-d0fa3a958b99" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.565086] env[61273]: DEBUG 
nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 755.565418] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 755.565844] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-133619b0-5ad6-45a9-933b-612f0d0b922e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.576973] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8866e843-8e4c-4694-b59c-017f147cf6b4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.604842] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2cd160c6-98ac-44a7-831e-d0fa3a958b99 could not be found. [ 755.605219] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 755.605517] env[61273]: INFO nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Took 0.04 seconds to destroy the instance on the hypervisor. [ 755.605877] env[61273]: DEBUG oslo.service.loopingcall [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.606194] env[61273]: DEBUG nova.compute.manager [-] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 755.606377] env[61273]: DEBUG nova.network.neutron [-] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 755.621062] env[61273]: DEBUG nova.network.neutron [-] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 755.621773] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 881ca51d71604434b8e31f794e39fbb0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 755.631324] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 881ca51d71604434b8e31f794e39fbb0 [ 755.637219] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 755.658000] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 755.658398] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 755.658662] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.658942] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 755.659188] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.659449] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 755.659801] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab 
tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 755.660086] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 755.660351] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 755.660605] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 755.660894] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 755.662032] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b704adf-319f-46cf-a432-166c3bf37513 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.670194] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91e119f-d2ba-467d-b57a-b92d8cc32c03 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.803029] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.805475] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 755.805847] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b59d368a-63ca-4ff0-b83b-5e3a161dc7e2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.812792] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 755.812792] env[61273]: value = "task-375330" [ 755.812792] env[61273]: _type = "Task" [ 
755.812792] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.847555] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375330, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.024947] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.025677] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 756.027495] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 7b7a9d53bb394c79b88bea72b9ef8d28 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 756.028649] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.009s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 756.030654] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 1a1f8a4008794552a495fa76eedb41a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 756.069056] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b7a9d53bb394c79b88bea72b9ef8d28 [ 756.071863] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a1f8a4008794552a495fa76eedb41a4 [ 756.125987] env[61273]: DEBUG nova.network.neutron [-] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.126729] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ff183cde4cbc46e7859e79b4645c7b9f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 756.137318] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff183cde4cbc46e7859e79b4645c7b9f [ 756.272487] env[61273]: ERROR nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6d97bb17-557c-40e8-91c4-7a67086718c1, please check 
neutron logs for more information. [ 756.272487] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 756.272487] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.272487] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 756.272487] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.272487] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 756.272487] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.272487] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 756.272487] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.272487] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 756.272487] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.272487] env[61273]: ERROR nova.compute.manager raise self.value [ 756.272487] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.272487] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 756.272487] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.272487] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 756.272860] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.272860] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 756.272860] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6d97bb17-557c-40e8-91c4-7a67086718c1, please check neutron logs for more information. 
[ 756.272860] env[61273]: ERROR nova.compute.manager [ 756.273234] env[61273]: Traceback (most recent call last): [ 756.273328] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 756.273328] env[61273]: listener.cb(fileno) [ 756.273404] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.273404] env[61273]: result = function(*args, **kwargs) [ 756.273468] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.273468] env[61273]: return func(*args, **kwargs) [ 756.273532] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 756.273532] env[61273]: raise e [ 756.273594] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.273594] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 756.273654] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.273654] env[61273]: created_port_ids = self._update_ports_for_instance( [ 756.273719] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.273719] env[61273]: with excutils.save_and_reraise_exception(): [ 756.273781] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.273781] env[61273]: self.force_reraise() [ 756.274195] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.274195] env[61273]: raise self.value [ 756.274319] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.274319] env[61273]: updated_port = self._update_port( [ 756.274412] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.274412] env[61273]: _ensure_no_port_binding_failure(port) [ 756.274473] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.274473] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 756.274579] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 6d97bb17-557c-40e8-91c4-7a67086718c1, please check neutron logs for more information. [ 756.274641] env[61273]: Removing descriptor: 19 [ 756.276083] env[61273]: ERROR nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6d97bb17-557c-40e8-91c4-7a67086718c1, please check neutron logs for more information. 
[ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Traceback (most recent call last): [ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] yield resources [ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self.driver.spawn(context, instance, image_meta, [ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] vm_ref = self.build_virtual_machine(instance, [ 756.276083] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] vif_infos = vmwarevif.get_vif_info(self._session, [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] for vif in network_info: [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] return self._sync_wrapper(fn, *args, **kwargs) [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self.wait() [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self[:] = self._gt.wait() [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] return self._exit_event.wait() [ 756.276347] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 756.276347] env[61273]: ERROR 
nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] result = hub.switch() [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] return self.greenlet.switch() [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] result = function(*args, **kwargs) [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] return func(*args, **kwargs) [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] raise e [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] nwinfo = self.network_api.allocate_for_instance( [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] created_port_ids = self._update_ports_for_instance( [ 756.276620] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] with excutils.save_and_reraise_exception(): [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self.force_reraise() [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] raise self.value [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] updated_port = self._update_port( [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.276888] 
env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] _ensure_no_port_binding_failure(port) [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] raise exception.PortBindingFailed(port_id=port['id']) [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] nova.exception.PortBindingFailed: Binding failed for port 6d97bb17-557c-40e8-91c4-7a67086718c1, please check neutron logs for more information. [ 756.276888] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] [ 756.278700] env[61273]: INFO nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Terminating instance [ 756.281136] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "refresh_cache-82f77423-cee6-4a04-8463-cabe57cba9cf" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.281456] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquired lock "refresh_cache-82f77423-cee6-4a04-8463-cabe57cba9cf" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.281772] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 756.282302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 1118e0f4ae5b4addb71deefa7a1fee56 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 756.289868] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1118e0f4ae5b4addb71deefa7a1fee56 [ 756.332325] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375330, 'name': PowerOffVM_Task, 'duration_secs': 0.113503} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.333177] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 756.333526] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 756.334996] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fa13c9-d0a5-4e0c-a07d-06e0afe60003 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.344738] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 756.345111] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17c7c390-e19d-43b9-bbb4-36405663ce5c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.370854] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 756.371475] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Deleting contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 756.371892] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Deleting the datastore file [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.372596] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51afedca-20c6-4c16-8945-9294402260ee {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.380897] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 756.380897] env[61273]: value = "task-375332" [ 756.380897] env[61273]: _type = "Task" [ 756.380897] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.389540] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375332, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.533733] env[61273]: DEBUG nova.compute.utils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 756.534379] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 77c089df8fc846c99acc69ad17b228fa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 756.538185] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 756.538365] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 756.544730] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77c089df8fc846c99acc69ad17b228fa [ 756.623088] env[61273]: DEBUG nova.policy [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f09e88112924400db81a4fbe611482f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70c9e6d7debd4d8e8cb7790975294a22', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 756.632330] env[61273]: INFO nova.compute.manager [-] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Took 1.03 seconds to deallocate network for instance. 
[ 756.636386] env[61273]: DEBUG nova.compute.claims [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 756.636386] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.802149] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.880040] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48babeb7-e4f7-447b-a334-d08f6493597e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.887354] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beade83c-c044-431a-b18b-00e790d9525d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.894705] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104135} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.895309] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 756.895500] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Deleted contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 756.895667] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 756.897364] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg a42f20c3032d4824bdc4e068eff4c9bb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 756.922856] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.923359] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 6a290b17104f46c9973577b2c1abc6b8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 756.924881] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b918a51b-bc5d-4d3f-b1ee-ab2941255afd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.932867] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b873649-5066-4912-add0-d2855df997a7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.937045] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a290b17104f46c9973577b2c1abc6b8 [ 756.952064] env[61273]: DEBUG nova.compute.provider_tree [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.952064] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 9fad90da706a4a779531c718f95eaa96 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 756.952064] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a42f20c3032d4824bdc4e068eff4c9bb [ 756.958555] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fad90da706a4a779531c718f95eaa96 [ 757.042438] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 757.043761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 1f28420d85e94e0f95e411932f259948 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 757.080100] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f28420d85e94e0f95e411932f259948 [ 757.139936] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Successfully created port: 111bf606-b67a-4d6a-8de1-a66912dc3f30 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 757.402803] env[61273]: DEBUG nova.compute.manager [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Received event network-changed-6d97bb17-557c-40e8-91c4-7a67086718c1 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 757.403006] env[61273]: DEBUG nova.compute.manager [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Refreshing instance network info cache due to event network-changed-6d97bb17-557c-40e8-91c4-7a67086718c1. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 757.403190] env[61273]: DEBUG oslo_concurrency.lockutils [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] Acquiring lock "refresh_cache-82f77423-cee6-4a04-8463-cabe57cba9cf" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.427193] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 5bbbdd9ce9654a018888b0fcbe749364 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 757.428375] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Releasing lock "refresh_cache-82f77423-cee6-4a04-8463-cabe57cba9cf" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.428706] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 757.428885] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 757.429338] env[61273]: DEBUG oslo_concurrency.lockutils [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] Acquired lock "refresh_cache-82f77423-cee6-4a04-8463-cabe57cba9cf" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.429506] env[61273]: DEBUG nova.network.neutron [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Refreshing network info cache for port 6d97bb17-557c-40e8-91c4-7a67086718c1 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 757.429865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] Expecting reply to msg 396c1f775e8e48e1a3b86619157c099b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 757.430518] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7e4cf89-4fb4-4a5c-9092-94abdc0e4e80 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.441814] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebf942a-c39e-4206-825d-e92d4bd626d6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.452549] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 396c1f775e8e48e1a3b86619157c099b [ 757.453554] env[61273]: DEBUG nova.scheduler.client.report [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 757.455893] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 81f9f49b3e2f440d9dac943914ec8213 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 757.458423] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bbbdd9ce9654a018888b0fcbe749364 [ 757.469781] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Instance does not exist on backend: 
nova.exception.InstanceNotFound: Instance 82f77423-cee6-4a04-8463-cabe57cba9cf could not be found. [ 757.469991] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 757.470167] env[61273]: INFO nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 757.470405] env[61273]: DEBUG oslo.service.loopingcall [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 757.470867] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81f9f49b3e2f440d9dac943914ec8213 [ 757.471223] env[61273]: DEBUG nova.compute.manager [-] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 757.471329] env[61273]: DEBUG nova.network.neutron [-] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 757.490994] env[61273]: DEBUG nova.network.neutron [-] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.491503] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 872f97e972fd431c9e6c356c713891d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 757.498980] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 872f97e972fd431c9e6c356c713891d3 [ 757.549902] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 31086f587b31480dad3b1d43305b6565 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 757.589213] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31086f587b31480dad3b1d43305b6565 [ 757.952421] env[61273]: DEBUG nova.network.neutron [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.956466] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 757.956739] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 757.956940] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.957166] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 757.957351] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.957534] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 757.957776] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 757.957976] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 757.958212] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a 
tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 757.958374] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 757.958666] env[61273]: DEBUG nova.virt.hardware [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.959927] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.931s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.960463] env[61273]: ERROR nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7eb3b801-5a05-42ee-8612-36ba45119473, please check neutron logs for more information. [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Traceback (most recent call last): [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self.driver.spawn(context, instance, image_meta, [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] vm_ref = self.build_virtual_machine(instance, [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.960463] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] for vif in network_info: [ 
757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] return self._sync_wrapper(fn, *args, **kwargs) [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self.wait() [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self[:] = self._gt.wait() [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] return self._exit_event.wait() [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] result = hub.switch() [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 757.960773] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] return self.greenlet.switch() [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] result = function(*args, **kwargs) [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] return func(*args, **kwargs) [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] raise e [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] nwinfo = self.network_api.allocate_for_instance( [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] created_port_ids = self._update_ports_for_instance( [ 757.961083] env[61273]: ERROR 
nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] with excutils.save_and_reraise_exception(): [ 757.961083] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] self.force_reraise() [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] raise self.value [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] updated_port = self._update_port( [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] _ensure_no_port_binding_failure(port) [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] raise exception.PortBindingFailed(port_id=port['id']) [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] nova.exception.PortBindingFailed: Binding failed for port 7eb3b801-5a05-42ee-8612-36ba45119473, please check neutron logs for more information. [ 757.961380] env[61273]: ERROR nova.compute.manager [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] [ 757.961859] env[61273]: DEBUG nova.compute.utils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Binding failed for port 7eb3b801-5a05-42ee-8612-36ba45119473, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 757.963682] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b3e2c1-a55b-4530-b35d-237a9b5a0519 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.967269] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Build of instance 799d4a06-f7a3-4b92-8e96-ac076848fdd3 was re-scheduled: Binding failed for port 7eb3b801-5a05-42ee-8612-36ba45119473, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 757.967793] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 757.968091] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquiring lock "refresh_cache-799d4a06-f7a3-4b92-8e96-ac076848fdd3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.968271] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Acquired lock "refresh_cache-799d4a06-f7a3-4b92-8e96-ac076848fdd3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.968495] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 757.969270] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg c0db001244da49ac9dcc87b2ebbad48f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 757.969814] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.310s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.978786] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 50961cef10c44a9bb144c7446020937f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 757.980882] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0db001244da49ac9dcc87b2ebbad48f [ 757.989057] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c8bf26-87f1-452d-bc48-965733a972f2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.994129] env[61273]: DEBUG nova.network.neutron [-] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.994674] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 39c6786e96d04dd7acf6c184697eacc9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 758.006645] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39c6786e96d04dd7acf6c184697eacc9 [ 758.007316] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.015301] env[61273]: DEBUG oslo.service.loopingcall [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 758.016161] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 758.016676] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2491f14-0b22-4546-b0ef-045d0b5a58de {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.033609] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50961cef10c44a9bb144c7446020937f [ 758.038702] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 758.038702] env[61273]: value = "task-375333" [ 758.038702] env[61273]: _type = "Task" [ 758.038702] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.050107] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375333, 'name': CreateVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.053617] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 758.083011] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 758.083436] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 758.083698] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.084066] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 758.084333] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.084701] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 758.085024] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 758.085305] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 758.085598] env[61273]: DEBUG nova.virt.hardware [None 
req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 758.085865] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 758.086137] env[61273]: DEBUG nova.virt.hardware [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 758.087139] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86cd02ff-fb05-4b1b-90e4-ee03de47f147 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.095846] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff738bb9-3e4d-477e-b0b1-467fc1fcf144 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.101673] env[61273]: DEBUG nova.network.neutron [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.102310] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] Expecting reply to msg b006ad0b8cca4b408a117d1bb8b9ba50 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 758.114639] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b006ad0b8cca4b408a117d1bb8b9ba50 [ 758.495485] env[61273]: ERROR nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 111bf606-b67a-4d6a-8de1-a66912dc3f30, please check neutron logs for more information. 
[ 758.495485] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 758.495485] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.495485] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 758.495485] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 758.495485] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 758.495485] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 758.495485] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 758.495485] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.495485] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 758.495485] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.495485] env[61273]: ERROR nova.compute.manager raise self.value [ 758.495485] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 758.495485] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 758.495485] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.495485] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 758.496059] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.496059] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 758.496059] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 111bf606-b67a-4d6a-8de1-a66912dc3f30, please check neutron logs for more information. 
[ 758.496059] env[61273]: ERROR nova.compute.manager [ 758.496059] env[61273]: Traceback (most recent call last): [ 758.496059] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 758.496059] env[61273]: listener.cb(fileno) [ 758.496059] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.496059] env[61273]: result = function(*args, **kwargs) [ 758.496059] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 758.496059] env[61273]: return func(*args, **kwargs) [ 758.496059] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 758.496059] env[61273]: raise e [ 758.496059] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.496059] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 758.496059] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 758.496059] env[61273]: created_port_ids = self._update_ports_for_instance( [ 758.496059] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 758.496059] env[61273]: with excutils.save_and_reraise_exception(): [ 758.496059] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.496059] env[61273]: self.force_reraise() [ 758.496059] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.496059] env[61273]: raise self.value [ 758.496059] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 758.496059] env[61273]: updated_port = self._update_port( [ 758.496059] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.496059] env[61273]: _ensure_no_port_binding_failure(port) [ 758.496059] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.496059] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 758.496752] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 111bf606-b67a-4d6a-8de1-a66912dc3f30, please check neutron logs for more information. [ 758.496752] env[61273]: Removing descriptor: 19 [ 758.496752] env[61273]: ERROR nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 111bf606-b67a-4d6a-8de1-a66912dc3f30, please check neutron logs for more information. 
[ 758.496752] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Traceback (most recent call last): [ 758.496752] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 758.496752] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] yield resources [ 758.496752] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 758.496752] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self.driver.spawn(context, instance, image_meta, [ 758.496752] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 758.496752] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 758.496752] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 758.496752] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] vm_ref = self.build_virtual_machine(instance, [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] vif_infos = vmwarevif.get_vif_info(self._session, [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] for vif in network_info: [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] return self._sync_wrapper(fn, *args, **kwargs) [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self.wait() [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self[:] = self._gt.wait() [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] return self._exit_event.wait() [ 758.497002] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 758.497658] env[61273]: ERROR 
nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] result = hub.switch() [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] return self.greenlet.switch() [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] result = function(*args, **kwargs) [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] return func(*args, **kwargs) [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] raise e [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] nwinfo = self.network_api.allocate_for_instance( [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 758.497658] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] created_port_ids = self._update_ports_for_instance( [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] with excutils.save_and_reraise_exception(): [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self.force_reraise() [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] raise self.value [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] updated_port = self._update_port( [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.497925] 
env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] _ensure_no_port_binding_failure(port) [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.497925] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] raise exception.PortBindingFailed(port_id=port['id']) [ 758.498174] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] nova.exception.PortBindingFailed: Binding failed for port 111bf606-b67a-4d6a-8de1-a66912dc3f30, please check neutron logs for more information. [ 758.498174] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] [ 758.498174] env[61273]: INFO nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Terminating instance [ 758.498174] env[61273]: INFO nova.compute.manager [-] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Took 1.03 seconds to deallocate network for instance. [ 758.500838] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "refresh_cache-a0a40c68-77e2-4152-ac2e-059f8f7a8f78" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.501041] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquired lock "refresh_cache-a0a40c68-77e2-4152-ac2e-059f8f7a8f78" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.501827] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.507991] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg c685a17e00294a7f8d6259b52692c050 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 758.509510] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.515327] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c685a17e00294a7f8d6259b52692c050 [ 758.519556] env[61273]: DEBUG nova.compute.claims [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 758.519750] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.556117] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375333, 'name': CreateVM_Task, 'duration_secs': 0.24391} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.556281] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 758.556900] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.557061] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.557376] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 758.559885] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf7525b5-a458-400d-8908-5d31022cfc87 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.564706] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 758.564706] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52008f33-5b4c-f5ae-68b9-d07344d6c13e" [ 758.564706] env[61273]: _type = "Task" [ 758.564706] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.574563] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52008f33-5b4c-f5ae-68b9-d07344d6c13e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.607123] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.607802] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 69c05ae06a5047f59da0ab462e1eb26b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 758.617668] env[61273]: DEBUG oslo_concurrency.lockutils [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] Releasing lock "refresh_cache-82f77423-cee6-4a04-8463-cabe57cba9cf" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.618062] env[61273]: DEBUG nova.compute.manager [req-87509445-2c74-4fb8-a438-fdf5ec6674fb req-5e401ebb-adef-4859-969e-5102941325f2 service nova] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Received event network-vif-deleted-6d97bb17-557c-40e8-91c4-7a67086718c1 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 758.618599] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69c05ae06a5047f59da0ab462e1eb26b [ 758.799562] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b249abdb-de43-445b-9eb0-df28ac5973cf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.808066] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7114d5bb-5bb0-4644-98e4-ec6022e8f14d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.838439] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3991c3c2-c7b5-4c4b-9918-0fda75d2da80 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.845824] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db120407-e5b8-4d4f-9cb0-f9e2092fe2ec {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.858679] env[61273]: DEBUG nova.compute.provider_tree [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.859192] 
env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 24abfd947c0e4e0f92c9dbe53e327efe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 758.866952] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24abfd947c0e4e0f92c9dbe53e327efe [ 759.028297] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.077097] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52008f33-5b4c-f5ae-68b9-d07344d6c13e, 'name': SearchDatastore_Task, 'duration_secs': 0.008714} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.077393] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.077618] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 759.077854] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.077975] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.078146] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 759.078465] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea19292c-62b3-41c8-8225-13b520aaee56 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
759.086409] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.086602] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 759.087560] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6729f099-48bd-4d92-ae92-07ae36f33ffd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.093261] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 759.093261] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52c5818d-3eb8-fd16-b63b-cbbe200fff19" [ 759.093261] env[61273]: _type = "Task" [ 759.093261] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.100826] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52c5818d-3eb8-fd16-b63b-cbbe200fff19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.104349] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.104846] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 014b23b2872845a7b6a2d572974fac6d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 759.109839] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Releasing lock "refresh_cache-799d4a06-f7a3-4b92-8e96-ac076848fdd3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.109839] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 759.109947] env[61273]: DEBUG nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 759.110109] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 759.112427] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 014b23b2872845a7b6a2d572974fac6d [ 759.126392] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.126950] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg f1af0d22a26341189ccb5fccee1b8dc2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 759.135200] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1af0d22a26341189ccb5fccee1b8dc2 [ 759.361405] env[61273]: DEBUG nova.scheduler.client.report [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 759.363979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 92dd6d05dc6a453b9eb83abdfd190fee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 759.374597] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92dd6d05dc6a453b9eb83abdfd190fee [ 759.434013] env[61273]: DEBUG nova.compute.manager [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Received event network-changed-111bf606-b67a-4d6a-8de1-a66912dc3f30 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 759.434013] env[61273]: DEBUG nova.compute.manager [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Refreshing 
instance network info cache due to event network-changed-111bf606-b67a-4d6a-8de1-a66912dc3f30. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 759.434013] env[61273]: DEBUG oslo_concurrency.lockutils [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] Acquiring lock "refresh_cache-a0a40c68-77e2-4152-ac2e-059f8f7a8f78" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.603915] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52c5818d-3eb8-fd16-b63b-cbbe200fff19, 'name': SearchDatastore_Task, 'duration_secs': 0.007748} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.604713] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-756c4a19-ef81-4c6d-95a3-8d65594bfb49 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.607129] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Releasing lock "refresh_cache-a0a40c68-77e2-4152-ac2e-059f8f7a8f78" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.607521] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 759.607708] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 759.607982] env[61273]: DEBUG oslo_concurrency.lockutils [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] Acquired lock "refresh_cache-a0a40c68-77e2-4152-ac2e-059f8f7a8f78" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.608180] env[61273]: DEBUG nova.network.neutron [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Refreshing network info cache for port 111bf606-b67a-4d6a-8de1-a66912dc3f30 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 759.608586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] Expecting reply to msg b508b693d5fc42659a50c551cd8db52d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 759.609443] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-373a7495-7086-46bf-9136-26547e22b678 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.614335] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 759.614335] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52c95da9-ae15-b532-2f77-3427a068abaf" [ 759.614335] env[61273]: _type = "Task" [ 759.614335] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.615738] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b508b693d5fc42659a50c551cd8db52d [ 759.621273] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4496e30-fd49-4578-b0b5-a538af2520d7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.631848] env[61273]: DEBUG nova.network.neutron [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.632325] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg 8e1922e86e654becbc929f27f1a2b360 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 759.638249] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52c95da9-ae15-b532-2f77-3427a068abaf, 'name': SearchDatastore_Task, 'duration_secs': 0.008144} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.638484] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.638728] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc/f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 759.638967] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d61bcf24-f430-43dd-8ead-b23ee707bab7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.641371] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e1922e86e654becbc929f27f1a2b360 [ 759.646429] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a0a40c68-77e2-4152-ac2e-059f8f7a8f78 could not be found. 
[ 759.646629] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 759.646834] env[61273]: INFO nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Took 0.04 seconds to destroy the instance on the hypervisor. [ 759.647073] env[61273]: DEBUG oslo.service.loopingcall [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.647596] env[61273]: DEBUG nova.compute.manager [-] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 759.647691] env[61273]: DEBUG nova.network.neutron [-] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 759.652363] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 759.652363] env[61273]: value = "task-375334" [ 759.652363] env[61273]: _type = "Task" [ 759.652363] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.659908] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.662417] env[61273]: DEBUG nova.network.neutron [-] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.662880] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 08712167199d4c148ff152a2a31ccdac in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 759.669718] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08712167199d4c148ff152a2a31ccdac [ 759.866367] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.896s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.867095] env[61273]: ERROR nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d0b95e54-3f33-48d6-9f74-c633d85b9772, please check neutron logs for more information. [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Traceback (most recent call last): [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self.driver.spawn(context, instance, image_meta, [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] vm_ref = self.build_virtual_machine(instance, [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] vif_infos = vmwarevif.get_vif_info(self._session, [ 759.867095] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] for vif in network_info: [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] return self._sync_wrapper(fn, *args, **kwargs) [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/model.py", line 603, in 
_sync_wrapper [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self.wait() [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self[:] = self._gt.wait() [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] return self._exit_event.wait() [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] result = hub.switch() [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 759.867446] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] return self.greenlet.switch() [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] result = function(*args, **kwargs) [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] return func(*args, **kwargs) [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] raise e [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] nwinfo = self.network_api.allocate_for_instance( [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] created_port_ids = self._update_ports_for_instance( [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] with excutils.save_and_reraise_exception(): [ 759.867797] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
227, in __exit__ [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] self.force_reraise() [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] raise self.value [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] updated_port = self._update_port( [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] _ensure_no_port_binding_failure(port) [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] raise exception.PortBindingFailed(port_id=port['id']) [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] nova.exception.PortBindingFailed: Binding failed for port d0b95e54-3f33-48d6-9f74-c633d85b9772, please check neutron logs for more information. [ 759.868207] env[61273]: ERROR nova.compute.manager [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] [ 759.868469] env[61273]: DEBUG nova.compute.utils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Binding failed for port d0b95e54-3f33-48d6-9f74-c633d85b9772, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 759.869117] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.619s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.871093] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 49c320caf09e4dcdb0c08e6151053bcb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 759.874594] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Build of instance 21213cff-55b3-48fd-91b4-6718f7819bc3 was re-scheduled: Binding failed for port d0b95e54-3f33-48d6-9f74-c633d85b9772, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 759.874594] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 759.874810] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Acquiring lock "refresh_cache-21213cff-55b3-48fd-91b4-6718f7819bc3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.874990] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Acquired lock "refresh_cache-21213cff-55b3-48fd-91b4-6718f7819bc3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.875155] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 759.875562] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 15ec16e82a594cd2b32f98933d3331e7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 759.886377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15ec16e82a594cd2b32f98933d3331e7 [ 759.905651] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49c320caf09e4dcdb0c08e6151053bcb [ 759.935393] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.935623] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.128572] env[61273]: DEBUG nova.network.neutron [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 760.135066] env[61273]: INFO nova.compute.manager [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] [instance: 799d4a06-f7a3-4b92-8e96-ac076848fdd3] Took 1.02 seconds to deallocate network for instance. [ 760.136877] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg c7fa47350e684e25a4d61dbc04c9ab70 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 760.163344] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457537} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.163613] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc/f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 760.163807] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 760.164094] env[61273]: DEBUG nova.network.neutron [-] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.164484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 487754f6d265459894d8482b47aa889e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 760.167423] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36e836e5-ce8c-4cb5-968a-41e9569b5a95 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.167726] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7fa47350e684e25a4d61dbc04c9ab70 [ 760.173174] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 760.173174] env[61273]: value = "task-375335" [ 760.173174] env[61273]: _type = "Task" [ 760.173174] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.176801] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 487754f6d265459894d8482b47aa889e [ 760.182206] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375335, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.218230] env[61273]: DEBUG nova.network.neutron [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.218784] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] Expecting reply to msg b51761fdbc7c4a07a260585ee34930f2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 760.228109] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b51761fdbc7c4a07a260585ee34930f2 [ 760.391251] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 760.457387] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.457901] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 155a199d3e804408b4f078fc37cba6dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 760.467713] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 155a199d3e804408b4f078fc37cba6dc [ 760.620316] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a573ad-5c73-4905-9701-764d16c55146 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.627678] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4525fb-969b-41a2-80a9-ede3c533da38 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.659206] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg cea280f781b3406789de23bdd316763d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 760.660849] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-11b00615-65f6-4155-814e-224857c195e0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.668172] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f56d0c-bc80-4e41-8242-d36607796912 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.671906] env[61273]: INFO nova.compute.manager [-] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Took 1.02 seconds to deallocate network for instance. [ 760.674105] env[61273]: DEBUG nova.compute.claims [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 760.674335] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.686517] env[61273]: DEBUG nova.compute.provider_tree [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.687977] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 3c5e6b5a32a8492b8f0d5f63a9ee7ca2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 760.692205] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060771} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.692745] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cea280f781b3406789de23bdd316763d [ 760.693141] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.693853] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c6eeb5-32a8-4dea-a2be-d9ad3b37710d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.696416] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c5e6b5a32a8492b8f0d5f63a9ee7ca2 [ 760.713161] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc/f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.713577] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a15d7b6-7db3-475c-8a80-1441d0ff7a70 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.727451] env[61273]: DEBUG oslo_concurrency.lockutils [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] Releasing lock "refresh_cache-a0a40c68-77e2-4152-ac2e-059f8f7a8f78" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.727678] env[61273]: DEBUG nova.compute.manager [req-7c411c8b-0d18-419f-a3f8-dc2f576a4829 req-a12a256d-7d3f-4568-99c4-8505d0a2cf47 service nova] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Received event network-vif-deleted-111bf606-b67a-4d6a-8de1-a66912dc3f30 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 760.733255] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 760.733255] env[61273]: value = "task-375336" [ 760.733255] env[61273]: _type = "Task" [ 760.733255] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.740479] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375336, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.960678] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Releasing lock "refresh_cache-21213cff-55b3-48fd-91b4-6718f7819bc3" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.960946] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 760.961131] env[61273]: DEBUG nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 760.961304] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 760.977015] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 760.977610] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 7a47065881784f988b4f6d8dc32332b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 760.985656] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a47065881784f988b4f6d8dc32332b2 [ 761.186430] env[61273]: INFO nova.scheduler.client.report [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Deleted allocations for instance 799d4a06-f7a3-4b92-8e96-ac076848fdd3 [ 761.195719] env[61273]: DEBUG nova.scheduler.client.report [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 761.199228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg e4cf8c35e22444b8bcc7adf059e11eed in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 761.200962] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Expecting reply to msg aa51cd9a2b3d4c94acc2ec205ed8de75 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 761.211751] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4cf8c35e22444b8bcc7adf059e11eed [ 761.239281] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa51cd9a2b3d4c94acc2ec205ed8de75 [ 761.243237] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375336, 'name': ReconfigVM_Task, 'duration_secs': 0.313688} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.243492] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Reconfigured VM instance instance-00000032 to attach disk [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc/f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 761.244605] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab350342-7f7a-4063-b355-6c2e9b161dcf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.251982] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 761.251982] env[61273]: value = "task-375337" [ 761.251982] env[61273]: _type = "Task" [ 761.251982] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.260451] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375337, 'name': Rename_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.479802] env[61273]: DEBUG nova.network.neutron [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.480405] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 5bcde051e91a4f4ebc8bc56b4119e92b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 761.490554] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bcde051e91a4f4ebc8bc56b4119e92b [ 761.702894] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.834s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.703542] env[61273]: ERROR nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df, please check neutron logs for more information. 
[ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Traceback (most recent call last): [ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self.driver.spawn(context, instance, image_meta, [ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] vm_ref = self.build_virtual_machine(instance, [ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] vif_infos = vmwarevif.get_vif_info(self._session, [ 761.703542] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] for vif in network_info: [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] return self._sync_wrapper(fn, *args, **kwargs) [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self.wait() [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self[:] = self._gt.wait() [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] return self._exit_event.wait() [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] result = hub.switch() [ 761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
761.703864] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] return self.greenlet.switch() [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] result = function(*args, **kwargs) [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] return func(*args, **kwargs) [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] raise e [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] nwinfo = self.network_api.allocate_for_instance( [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] created_port_ids = self._update_ports_for_instance( [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] with excutils.save_and_reraise_exception(): [ 761.704213] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] self.force_reraise() [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] raise self.value [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] updated_port = self._update_port( [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] _ensure_no_port_binding_failure(port) [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] raise exception.PortBindingFailed(port_id=port['id']) [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] nova.exception.PortBindingFailed: Binding failed for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df, please check neutron logs for more information. [ 761.704543] env[61273]: ERROR nova.compute.manager [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] [ 761.704828] env[61273]: DEBUG nova.compute.utils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Binding failed for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 761.705601] env[61273]: DEBUG oslo_concurrency.lockutils [None req-227587c5-9ce8-4607-a942-f4ab390a602f tempest-SecurityGroupsTestJSON-905376977 tempest-SecurityGroupsTestJSON-905376977-project-member] Lock "799d4a06-f7a3-4b92-8e96-ac076848fdd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.422s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.706033] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Build of instance f3df4816-ef02-4ecc-a8ca-4f0eaf286218 was re-scheduled: Binding failed for port 0a0b4bd2-3d3f-440c-98f7-c40827ffa1df, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 761.706450] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 761.706675] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "refresh_cache-f3df4816-ef02-4ecc-a8ca-4f0eaf286218" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.706832] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquired lock "refresh_cache-f3df4816-ef02-4ecc-a8ca-4f0eaf286218" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.707003] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 761.707402] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 1abaab82aac0498cb0a533c127f6f7ef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 761.708120] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.763s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.708300] env[61273]: DEBUG nova.objects.instance [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61273) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 761.709773] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 047b859abba14d1d825d6300fd64e3a2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 761.710910] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 5ac1d5a394f446bda9990357c366f1d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 761.718744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1abaab82aac0498cb0a533c127f6f7ef [ 761.720949] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
5ac1d5a394f446bda9990357c366f1d3 [ 761.736750] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 047b859abba14d1d825d6300fd64e3a2 [ 761.761337] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375337, 'name': Rename_Task, 'duration_secs': 0.141829} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.761606] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 761.761863] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c7a20ff-f66d-4510-89dd-54238c49e214 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.768334] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 761.768334] env[61273]: value = "task-375338" [ 761.768334] env[61273]: _type = "Task" [ 761.768334] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.775669] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375338, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.982660] env[61273]: INFO nova.compute.manager [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] [instance: 21213cff-55b3-48fd-91b4-6718f7819bc3] Took 1.02 seconds to deallocate network for instance. [ 761.984616] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 36dfb012dc4441868431cc75a0879755 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 762.017076] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36dfb012dc4441868431cc75a0879755 [ 762.216760] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 0acc730969f1440098bdcf7571be5536 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 762.217747] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 762.224482] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 43e81c9d9d75454b8379bb6e523399c3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 762.225598] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0acc730969f1440098bdcf7571be5536 [ 762.233511] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 762.260985] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43e81c9d9d75454b8379bb6e523399c3 [ 762.281861] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375338, 'name': PowerOnVM_Task} progress is 64%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.316805] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.317434] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg dd788a034e1e444fb28b479c5233eac0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 762.326964] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd788a034e1e444fb28b479c5233eac0 [ 762.489899] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 350851997ace48abb129d99a57c81a90 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 762.529053] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 350851997ace48abb129d99a57c81a90 [ 762.721567] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.721963] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1e71974d-aa8d-41ca-a251-05df644e93d2 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg f3c7a886bd5448919a1edfc8d69b0e43 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 762.722914] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 
tempest-InstanceActionsTestJSON-1354842284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.746s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.724710] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 9d669dded51e454a9dbeb9abfb707379 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 762.746772] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3c7a886bd5448919a1edfc8d69b0e43 [ 762.750054] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.779964] env[61273]: DEBUG oslo_vmware.api [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375338, 'name': PowerOnVM_Task, 'duration_secs': 0.764255} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.780392] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 762.780625] env[61273]: DEBUG nova.compute.manager [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 762.781462] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96ca0aa-7a01-4c27-ad38-86321f4cbe74 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.790192] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 404aab924cbe4edc875c7d451b0e9075 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 762.791692] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d669dded51e454a9dbeb9abfb707379 [ 762.827358] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Releasing lock "refresh_cache-f3df4816-ef02-4ecc-a8ca-4f0eaf286218" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.827605] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Virt driver does not provide unplug_vifs 
method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 762.827842] env[61273]: DEBUG nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 762.828078] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 762.845100] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 404aab924cbe4edc875c7d451b0e9075 [ 762.846455] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 762.847055] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 6ab50d922db4480c92b683b3494a3316 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 762.860742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ab50d922db4480c92b683b3494a3316 [ 763.016516] env[61273]: INFO nova.scheduler.client.report [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Deleted allocations for instance 21213cff-55b3-48fd-91b4-6718f7819bc3 [ 763.022880] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Expecting reply to msg 5772e64a235d439aa69cb4b2adfd77cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 763.040459] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5772e64a235d439aa69cb4b2adfd77cc [ 763.329010] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.349128] env[61273]: DEBUG nova.network.neutron [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.349640] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 
6caebeacf3a3421b8f397f6d4fe96dff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 763.361562] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6caebeacf3a3421b8f397f6d4fe96dff [ 763.509860] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0974494b-91d1-4331-92c4-0b7b702f6037 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.517532] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6653aa-04f0-4ea6-8adc-8661c6681db9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.557058] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2157ae5d-80ef-4ab7-9582-3b72362f377b tempest-ServersV294TestFqdnHostnames-782809002 tempest-ServersV294TestFqdnHostnames-782809002-project-member] Lock "21213cff-55b3-48fd-91b4-6718f7819bc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 134.384s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.558003] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg be4bab7e9de6456e8669a57ad6bc7bcf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 763.560442] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3371221-fec2-4ec2-9323-eed4f5d4ff64 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.568382] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb2f692-286e-43d3-ac7c-b17836d1d753 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.573330] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be4bab7e9de6456e8669a57ad6bc7bcf [ 763.583738] env[61273]: DEBUG nova.compute.provider_tree [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.584313] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 880d7366b7f14336be6d6b77ed0c39e7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 763.591962] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 880d7366b7f14336be6d6b77ed0c39e7 [ 763.669411] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg d660efbeb34343c6ab2bbb3df347cfee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 763.678970] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d660efbeb34343c6ab2bbb3df347cfee [ 763.851665] 
env[61273]: INFO nova.compute.manager [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: f3df4816-ef02-4ecc-a8ca-4f0eaf286218] Took 1.02 seconds to deallocate network for instance. [ 763.853436] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 1be873ee84694f988c9b0a793bc03f97 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 763.889416] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1be873ee84694f988c9b0a793bc03f97 [ 764.060166] env[61273]: DEBUG nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 764.062047] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg cd351af9f9c345e3837cab43ec6f864d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 764.086539] env[61273]: DEBUG nova.scheduler.client.report [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 764.089064] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 587b4b67ee5240838e2e90d82b47ad29 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 764.103291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 587b4b67ee5240838e2e90d82b47ad29 [ 764.110094] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd351af9f9c345e3837cab43ec6f864d [ 764.172180] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.172458] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61273) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.172689] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.172878] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.173042] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.175211] env[61273]: INFO nova.compute.manager [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Terminating instance [ 764.176916] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "refresh_cache-f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.177061] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquired lock "refresh_cache-f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.177219] env[61273]: DEBUG nova.network.neutron [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 764.177608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 8e5a549e10aa410c8111bd60c6e41405 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 764.184282] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e5a549e10aa410c8111bd60c6e41405 [ 764.359105] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 5ca96ca3c4f148cd92091f7ada3f0f9a in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 764.394406] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ca96ca3c4f148cd92091f7ada3f0f9a [ 764.582882] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.592019] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.869s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.592701] env[61273]: ERROR nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8af34a72-d61f-4969-b3b5-acef24d3c087, please check neutron logs for more information. [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Traceback (most recent call last): [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self.driver.spawn(context, instance, image_meta, [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] vm_ref = self.build_virtual_machine(instance, [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] vif_infos = vmwarevif.get_vif_info(self._session, [ 764.592701] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] for vif in network_info: [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] return self._sync_wrapper(fn, 
*args, **kwargs) [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self.wait() [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self[:] = self._gt.wait() [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] return self._exit_event.wait() [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] current.throw(*self._exc) [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 764.592978] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] result = function(*args, **kwargs) [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] return func(*args, **kwargs) [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] raise e [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] nwinfo = self.network_api.allocate_for_instance( [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] created_port_ids = self._update_ports_for_instance( [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] with excutils.save_and_reraise_exception(): [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 764.593257] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] self.force_reraise() [ 764.593257] 
env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 764.593531] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] raise self.value [ 764.593531] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 764.593531] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] updated_port = self._update_port( [ 764.593531] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 764.593531] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] _ensure_no_port_binding_failure(port) [ 764.593531] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 764.593531] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] raise exception.PortBindingFailed(port_id=port['id']) [ 764.593531] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] nova.exception.PortBindingFailed: Binding failed for port 8af34a72-d61f-4969-b3b5-acef24d3c087, please check neutron logs for more information. [ 764.593531] env[61273]: ERROR nova.compute.manager [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] [ 764.593531] env[61273]: DEBUG nova.compute.utils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Binding failed for port 8af34a72-d61f-4969-b3b5-acef24d3c087, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 764.594546] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.726s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.602125] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 6980d937d24d49b8aff39c8921ec72ff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 764.605705] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Build of instance 2a7d4872-4ed7-4058-bc36-b199d89a9f14 was re-scheduled: Binding failed for port 8af34a72-d61f-4969-b3b5-acef24d3c087, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 764.606252] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 764.606454] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Acquiring lock "refresh_cache-2a7d4872-4ed7-4058-bc36-b199d89a9f14" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.606603] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Acquired lock "refresh_cache-2a7d4872-4ed7-4058-bc36-b199d89a9f14" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.607380] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 764.607953] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 762f2b9ecc4f479e83dfea34949e1413 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 764.622915] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 762f2b9ecc4f479e83dfea34949e1413 [ 764.628791] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6980d937d24d49b8aff39c8921ec72ff [ 764.704713] env[61273]: DEBUG nova.network.neutron [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.790033] env[61273]: DEBUG nova.network.neutron [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.790781] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 217fd7ee2d694aaba243da4f0a504225 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 764.806439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 217fd7ee2d694aaba243da4f0a504225 [ 764.896190] env[61273]: INFO nova.scheduler.client.report [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Deleted allocations for instance f3df4816-ef02-4ecc-a8ca-4f0eaf286218 [ 764.902225] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 3750be3a28a34a9fad99e10f20f8be8c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 764.932544] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3750be3a28a34a9fad99e10f20f8be8c [ 765.109087] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 38b829bcade84c9abc4f0cc4e0487046 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 765.125036] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38b829bcade84c9abc4f0cc4e0487046 [ 765.142598] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.223963] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.224546] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg fd53a6f62b6847dabfc8d853769d182f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 765.233479] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd53a6f62b6847dabfc8d853769d182f [ 765.293005] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Releasing lock "refresh_cache-f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.293442] env[61273]: DEBUG nova.compute.manager [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 765.293615] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 765.294524] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ba22af-8783-48c6-9601-dd695f74e0eb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.302321] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 765.302678] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2c7adbd-30ba-470d-9ba2-db4913a768c9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.309477] env[61273]: DEBUG oslo_vmware.api [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 765.309477] env[61273]: value = "task-375339" [ 765.309477] env[61273]: _type = "Task" [ 765.309477] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.321717] env[61273]: DEBUG oslo_vmware.api [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375339, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.410287] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bcb5091e-96cb-49b0-b234-1e19130786d1 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "f3df4816-ef02-4ecc-a8ca-4f0eaf286218" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.940s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.412048] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg a895077419fa499794d8839e1da265f8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 765.421962] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a895077419fa499794d8839e1da265f8 [ 765.634634] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance ca8a38c7-a81c-407a-9558-3d15e492d9fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 765.634814] env[61273]: WARNING nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 765.635394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f49efb32eede47a5b02fec2741e85677 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 765.645513] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f49efb32eede47a5b02fec2741e85677 [ 765.726509] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Releasing lock "refresh_cache-2a7d4872-4ed7-4058-bc36-b199d89a9f14" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.726813] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 765.726922] env[61273]: DEBUG nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 765.727099] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 765.747195] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.748396] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 76735bc4bbfe4e298673e217147127fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 765.756784] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76735bc4bbfe4e298673e217147127fc [ 765.819776] env[61273]: DEBUG oslo_vmware.api [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375339, 'name': PowerOffVM_Task, 'duration_secs': 0.17871} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.820210] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 765.820266] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 765.820468] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3d062ff-da64-4a33-aaf2-92b158d50d52 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.843018] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 765.843127] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Deleting contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 765.843248] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Deleting the datastore file [datastore1] f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 765.843518] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3321c2cd-bd98-44a1-800c-2a230ca4aaf9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.850015] env[61273]: DEBUG oslo_vmware.api [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for the task: (returnval){ [ 765.850015] env[61273]: value = "task-375341" [ 765.850015] env[61273]: _type = "Task" [ 765.850015] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.862908] env[61273]: DEBUG oslo_vmware.api [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375341, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.919401] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 765.922175] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 602c53a160314644b9300f6fbd84616d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 765.971299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 602c53a160314644b9300f6fbd84616d [ 766.140161] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 2a7d4872-4ed7-4058-bc36-b199d89a9f14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.140321] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance c376b161-74f9-405a-bb86-516583a9a76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 766.140446] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 766.140561] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 2cd160c6-98ac-44a7-831e-d0fa3a958b99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 766.140672] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 82f77423-cee6-4a04-8463-cabe57cba9cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 766.140782] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance a0a40c68-77e2-4152-ac2e-059f8f7a8f78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 766.141346] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 4fbf0b281189427d8baccd5fb4c83b5e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 766.156068] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fbf0b281189427d8baccd5fb4c83b5e [ 766.250621] env[61273]: DEBUG nova.network.neutron [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.250970] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 4ee4608248b5494b86401f650afd18ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 766.259357] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ee4608248b5494b86401f650afd18ba [ 766.359710] env[61273]: DEBUG oslo_vmware.api [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Task: {'id': task-375341, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089551} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.360113] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 766.360428] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Deleted contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 766.360714] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 766.360991] env[61273]: INFO nova.compute.manager [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Took 1.07 seconds to destroy the instance on the hypervisor. [ 766.361333] env[61273]: DEBUG oslo.service.loopingcall [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.361628] env[61273]: DEBUG nova.compute.manager [-] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 766.361838] env[61273]: DEBUG nova.network.neutron [-] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 766.380093] env[61273]: DEBUG nova.network.neutron [-] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.380746] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0a9c5fcc142243fd8a5cd5d0344ce97b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 766.390263] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a9c5fcc142243fd8a5cd5d0344ce97b [ 766.440960] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.648168] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance e62c0b97-cfa7-4acf-bdc5-93d6996c7806 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.648728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg ac4bb5f5776b4f64a73e9b20bd8fcd27 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 766.661334] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac4bb5f5776b4f64a73e9b20bd8fcd27 [ 766.724580] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "6494039f-3716-4174-92c0-15df384e0878" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.724815] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "6494039f-3716-4174-92c0-15df384e0878" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.753798] env[61273]: INFO nova.compute.manager [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] [instance: 2a7d4872-4ed7-4058-bc36-b199d89a9f14] Took 1.03 seconds to deallocate network for instance. [ 766.755498] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg e59b5cb4bb054400a51e5c7644249f63 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 766.803753] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e59b5cb4bb054400a51e5c7644249f63 [ 766.882870] env[61273]: DEBUG nova.network.neutron [-] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.883349] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4daad9a6d5524bb0881e1973405e706b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 766.895829] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4daad9a6d5524bb0881e1973405e706b [ 767.151708] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 7bfdc548-4f10-4525-9ea1-3781f90ca81d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.152048] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 090a428ed271452f90298f8188b1cd27 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 767.170863] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 090a428ed271452f90298f8188b1cd27 [ 767.260973] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg 34e8c1b38ff742cdabfa106b5d2b68af in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 767.319691] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34e8c1b38ff742cdabfa106b5d2b68af [ 767.381128] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "9debd209-244f-472a-b9d6-cf63bba98839" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.381378] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "9debd209-244f-472a-b9d6-cf63bba98839" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.388640] env[61273]: INFO nova.compute.manager [-] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Took 1.03 seconds to deallocate network for instance. [ 767.394819] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 833531122d574a72bce7780f5bf295ad in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 767.434313] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 833531122d574a72bce7780f5bf295ad [ 767.654577] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 0b400fe1-d0d0-4820-9f56-56ccbad5465a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.655252] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg ed6ef412eaf64a03b165bc5b08589417 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 767.666359] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed6ef412eaf64a03b165bc5b08589417 [ 767.774938] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "faaabf1e-74af-4cfa-ba1c-e2c2fabad124" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.774938] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "faaabf1e-74af-4cfa-ba1c-e2c2fabad124" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.794310] env[61273]: INFO nova.scheduler.client.report [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Deleted allocations for instance 2a7d4872-4ed7-4058-bc36-b199d89a9f14 [ 767.801302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Expecting reply to msg d083e509573e476287fe2d0300a52f8f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 767.818969] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d083e509573e476287fe2d0300a52f8f [ 767.897542] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.100865] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "98f63a99-f1b8-4420-978d-7b69c39a2692" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.100914] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "98f63a99-f1b8-4420-978d-7b69c39a2692" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.159615] env[61273]: DEBUG 
nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance e6108eed-93b4-40a5-a61b-67aa5bbe2fda has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.160623] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 903208930e4d4becb1bb288c2b390606 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 768.171299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 903208930e4d4becb1bb288c2b390606 [ 768.304556] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7e037969-2989-4a4a-bbff-66de7775f64d tempest-InstanceActionsTestJSON-1354842284 tempest-InstanceActionsTestJSON-1354842284-project-member] Lock "2a7d4872-4ed7-4058-bc36-b199d89a9f14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 134.428s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.305454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg dfdbdf629af3461fa4bdcc00e5755a9b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 768.315354] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfdbdf629af3461fa4bdcc00e5755a9b [ 768.663569] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 05901bd4-2bad-405e-8e73-f6de4393a0f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.664183] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg ffd7fb6124624775b5a65d72c5d7a2d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 768.674433] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffd7fb6124624775b5a65d72c5d7a2d3 [ 768.807752] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 768.809481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 74ccd0f8d7c64cdb9ae0e2184f74ea0c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 768.871701] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74ccd0f8d7c64cdb9ae0e2184f74ea0c [ 769.166518] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance e8e826d4-2463-41a7-8c63-fd9f47eceea6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.167112] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 9ed64ff5f365435084cd81bf51d70a35 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 769.179086] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ed64ff5f365435084cd81bf51d70a35 [ 769.329249] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.671879] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance f6faf064-364d-4d24-9822-220bce47b3f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.672490] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 91159ae9e0bd4a829023569f7d8ce4d0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 769.683129] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91159ae9e0bd4a829023569f7d8ce4d0 [ 769.759376] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "05df2575-9c3f-43d4-8fe4-52a808e11080" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.759606] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "05df2575-9c3f-43d4-8fe4-52a808e11080" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.805652] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "ae9866e2-544a-4d26-b198-87110f42f054" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.805819] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "ae9866e2-544a-4d26-b198-87110f42f054" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.926526] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "b720f9f1-9401-40b1-978b-9b8eefe712ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.926762] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "b720f9f1-9401-40b1-978b-9b8eefe712ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.175180] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance d63e20b1-e4ee-4c90-bc94-c4c05917fa1f has been scheduled to 
this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.175776] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 0eae3d6c657d43b692f2ce8ae3853e10 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 770.194121] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0eae3d6c657d43b692f2ce8ae3853e10 [ 770.678913] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance ebc03a5c-984f-4d58-abb0-da555adcfbac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.679498] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg fcc2f598edc6439ea50f2b395d30ec7c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 770.691676] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcc2f598edc6439ea50f2b395d30ec7c [ 771.186168] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.186750] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 2510deff6c4c4361bf817c7a76fb320b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 771.201739] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2510deff6c4c4361bf817c7a76fb320b [ 771.689312] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance fcdd594c-b89f-4d0b-a4d5-2644b3b62b56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.689609] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 771.689778] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 772.007365] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca29846-45da-4331-9075-a202421ac601 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.015132] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9815ac14-31be-4419-ab0e-9ce69f2d2da0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.046307] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9a4de6-7fe4-4783-b0ad-eb4761674a2e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.053641] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f6f819-3877-4261-b783-9403d5eca0c0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.067153] env[61273]: DEBUG nova.compute.provider_tree [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.067691] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f07f2498dc8f4b08b8e8df1de3bfe501 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 772.076390] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f07f2498dc8f4b08b8e8df1de3bfe501 [ 772.569961] env[61273]: DEBUG nova.scheduler.client.report [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 772.572396] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f4317f2c506c4c7fba340cde0f481954 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 772.587660] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4317f2c506c4c7fba340cde0f481954 [ 773.075109] 
env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61273) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 773.075373] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.481s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.075643] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.598s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.077487] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 6e82d796e2934bd382e06690aa0d22b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 773.078589] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.078743] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Cleaning up deleted instances {{(pid=61273) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11226}} [ 773.079247] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 0614d504328348cfa7007ac86109a834 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 773.100908] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0614d504328348cfa7007ac86109a834 [ 773.110653] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e82d796e2934bd382e06690aa0d22b1 [ 773.585401] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] There are 3 instances to clean {{(pid=61273) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11235}} [ 773.585694] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 109fc11e-d640-4617-99a3-0defe0a5aa6c] Instance has had 0 of 5 cleanup attempts {{(pid=61273) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11239}} [ 773.586716] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg bff2fa06a57243b08338397db2067974 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 773.620327] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bff2fa06a57243b08338397db2067974 [ 773.859737] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e794171-3677-4626-aeb5-925fa9665f54 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
773.867287] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd00e0c-92da-49ff-b60c-3145f5a0794c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.897670] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6b6553-e504-46dd-835b-b3d8d366dd3d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.904436] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599b8053-fb23-460d-ad2e-21cc3ff801b3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.916993] env[61273]: DEBUG nova.compute.provider_tree [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.917500] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg f6c6e688b780481ab8e7a8d91c7a6e06 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 773.930216] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6c6e688b780481ab8e7a8d91c7a6e06 [ 774.089185] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 62c3b24d-bee7-4dd2-a6c7-9303c7c28cca] Instance has had 0 of 5 cleanup attempts {{(pid=61273) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11239}} [ 774.090716] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 0d7b48338bb84febb76f5aee507174c8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 774.109979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d7b48338bb84febb76f5aee507174c8 [ 774.420709] env[61273]: DEBUG nova.scheduler.client.report [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 774.423160] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg da6d4f7a528547cca4718ced5c54775c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 774.435808] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da6d4f7a528547cca4718ced5c54775c [ 774.595136] env[61273]: DEBUG nova.compute.manager [None 
req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 4d75a1ec-6c11-4cdd-ba0e-aa6d3add80f2] Instance has had 0 of 5 cleanup attempts {{(pid=61273) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11239}} [ 774.595136] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 6ee9957f572041408cd55b77640a852c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 774.616856] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ee9957f572041408cd55b77640a852c [ 774.926214] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.850s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.926819] env[61273]: ERROR nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1798125d-2352-4879-a756-ef155a7b85e5, please check neutron logs for more information. [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] Traceback (most recent call last): [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self.driver.spawn(context, instance, image_meta, [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] vm_ref = self.build_virtual_machine(instance, [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] vif_infos = vmwarevif.get_vif_info(self._session, [ 774.926819] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] for vif in network_info: [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] return self._sync_wrapper(fn, *args, **kwargs) [ 
774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self.wait() [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self[:] = self._gt.wait() [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] return self._exit_event.wait() [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] current.throw(*self._exc) [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 774.927167] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] result = function(*args, **kwargs) [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] return func(*args, **kwargs) [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] raise e [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] nwinfo = self.network_api.allocate_for_instance( [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] created_port_ids = self._update_ports_for_instance( [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] with excutils.save_and_reraise_exception(): [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.927526] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] self.force_reraise() [ 774.927526] env[61273]: ERROR 
nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.927855] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] raise self.value [ 774.927855] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 774.927855] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] updated_port = self._update_port( [ 774.927855] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.927855] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] _ensure_no_port_binding_failure(port) [ 774.927855] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 774.927855] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] raise exception.PortBindingFailed(port_id=port['id']) [ 774.927855] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] nova.exception.PortBindingFailed: Binding failed for port 1798125d-2352-4879-a756-ef155a7b85e5, please check neutron logs for more information. [ 774.927855] env[61273]: ERROR nova.compute.manager [instance: c376b161-74f9-405a-bb86-516583a9a76f] [ 774.927855] env[61273]: DEBUG nova.compute.utils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Binding failed for port 1798125d-2352-4879-a756-ef155a7b85e5, please check neutron logs for more information. 
{{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 774.928870] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.076s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.929066] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.931052] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.394s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.932687] env[61273]: INFO nova.compute.claims [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 774.934332] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg e40f2ac1bceb4520b1459b2b3ebd8d46 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 774.935535] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Build of instance c376b161-74f9-405a-bb86-516583a9a76f was re-scheduled: Binding failed for port 1798125d-2352-4879-a756-ef155a7b85e5, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 774.935957] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 774.936198] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-c376b161-74f9-405a-bb86-516583a9a76f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.936365] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-c376b161-74f9-405a-bb86-516583a9a76f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.936505] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 774.936899] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 28fc8b73f9e849ce8882d6cedf05e9aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 774.975489] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e40f2ac1bceb4520b1459b2b3ebd8d46 [ 774.976702] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28fc8b73f9e849ce8882d6cedf05e9aa [ 775.086939] env[61273]: INFO nova.scheduler.client.report [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Deleted allocations for instance 1efd9cfe-3a0c-412c-aa44-3bf650d08f9d [ 775.089430] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 4205c93cba9e4a7392b91c45f36c7e38 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 775.097022] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.097252] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Cleaning up deleted instances with incomplete migration {{(pid=61273) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11264}} [ 775.097600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg fd6029c2d7e141928d123483fb84ae4e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 775.107127] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd6029c2d7e141928d123483fb84ae4e [ 775.128370] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4205c93cba9e4a7392b91c45f36c7e38 [ 775.442072] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 2b443d6eca85419a8e79f41e28aee57f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 775.453485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b443d6eca85419a8e79f41e28aee57f [ 775.457845] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 775.526486] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.526997] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg e1f6bba201a84906b8afd3b4ef389f4e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 775.534762] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1f6bba201a84906b8afd3b4ef389f4e [ 775.593366] env[61273]: DEBUG oslo_concurrency.lockutils [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "1efd9cfe-3a0c-412c-aa44-3bf650d08f9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.415s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.593828] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-2c077140-4e31-409a-a435-6faef6bfe635 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg ecbab1d3a678418fafa4ccc0d7d51ae4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 775.599096] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.599359] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg db071109a16548a0b427d5a62a9f493e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 775.605202] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db071109a16548a0b427d5a62a9f493e [ 775.607547] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecbab1d3a678418fafa4ccc0d7d51ae4 [ 775.965410] env[61273]: DEBUG nova.scheduler.client.report [None 
req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Refreshing inventories for resource provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 775.980593] env[61273]: DEBUG nova.scheduler.client.report [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Updating ProviderTree inventory for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 775.980833] env[61273]: DEBUG nova.compute.provider_tree [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Updating inventory in ProviderTree for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 775.991962] env[61273]: DEBUG nova.scheduler.client.report [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Refreshing aggregate associations for resource provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb, aggregates: None {{(pid=61273) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 776.009007] env[61273]: DEBUG nova.scheduler.client.report [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Refreshing trait associations for resource provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61273) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 776.029404] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-c376b161-74f9-405a-bb86-516583a9a76f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.029642] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 776.029819] env[61273]: DEBUG nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 776.029982] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 776.044347] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 776.044994] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 4f99e0fe05654b55a85fca20eba6c180 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 776.052490] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f99e0fe05654b55a85fca20eba6c180 [ 776.106644] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 19879747c5224b89a86d34b8562a1d89 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 776.116775] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19879747c5224b89a86d34b8562a1d89 [ 776.277646] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf73de43-d7cd-4f2b-a32c-76e062214608 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.285299] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdad35b-5388-4fe1-b88c-0172b5d977f6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.313962] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66daa6b8-f579-45e7-9350-3c899c272b63 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.321040] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f03652-a6a3-4767-ae7a-bfa8b270e6c1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.334520] env[61273]: DEBUG nova.compute.provider_tree [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
776.335024] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg b71b064c334b433b99a063e0ac0a6e24 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 776.343154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b71b064c334b433b99a063e0ac0a6e24 [ 776.548296] env[61273]: DEBUG nova.network.neutron [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.548296] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 13c833bb38934d6593ec59b636db3d4d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 776.557453] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13c833bb38934d6593ec59b636db3d4d [ 776.612883] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "ca8a38c7-a81c-407a-9558-3d15e492d9fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.612883] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "ca8a38c7-a81c-407a-9558-3d15e492d9fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.612883] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "ca8a38c7-a81c-407a-9558-3d15e492d9fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.612883] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "ca8a38c7-a81c-407a-9558-3d15e492d9fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.613323] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "ca8a38c7-a81c-407a-9558-3d15e492d9fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61273) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.613872] env[61273]: INFO nova.compute.manager [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Terminating instance [ 776.615708] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "refresh_cache-ca8a38c7-a81c-407a-9558-3d15e492d9fa" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.615989] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquired lock "refresh_cache-ca8a38c7-a81c-407a-9558-3d15e492d9fa" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.616281] env[61273]: DEBUG nova.network.neutron [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 776.616774] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 6791c68e929b4fff8b559724170dbfe3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 776.623611] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6791c68e929b4fff8b559724170dbfe3 [ 776.839806] env[61273]: DEBUG nova.scheduler.client.report [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 776.840984] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 9a17cab2d025416795d3d46be7f79776 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 776.853296] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a17cab2d025416795d3d46be7f79776 [ 777.050875] env[61273]: INFO nova.compute.manager [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: c376b161-74f9-405a-bb86-516583a9a76f] Took 1.02 seconds to deallocate network for instance. 
[ 777.052432] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 5826f656705946978f9523aea7923ede in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 777.085707] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5826f656705946978f9523aea7923ede [ 777.135655] env[61273]: DEBUG nova.network.neutron [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 777.192741] env[61273]: DEBUG nova.network.neutron [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.193329] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg d3499cc48e544293a31ded45e15ce8fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 777.202179] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3499cc48e544293a31ded45e15ce8fb [ 777.344834] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.345381] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 777.347183] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 1a3dd0d581d2490fb36dc6c90306e62b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 777.348254] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.545s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.355568] env[61273]: INFO nova.compute.claims [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.355568] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 1c747cf245b84a73887c279222c17c48 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 777.379971] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a3dd0d581d2490fb36dc6c90306e62b [ 777.385012] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c747cf245b84a73887c279222c17c48 [ 777.556976] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 58edd3e536dc4b8f8008818aebc6e5cb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 777.597356] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58edd3e536dc4b8f8008818aebc6e5cb [ 777.695770] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Releasing lock "refresh_cache-ca8a38c7-a81c-407a-9558-3d15e492d9fa" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.696268] env[61273]: DEBUG nova.compute.manager [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 777.696460] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 777.697351] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47051896-b924-4522-975c-036661461120 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.705543] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 777.705796] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68d9fa2a-39d2-42ef-9d24-6b51c2ed1d8b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.712704] env[61273]: DEBUG oslo_vmware.api [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 777.712704] env[61273]: value = "task-375342" [ 777.712704] env[61273]: _type = "Task" [ 777.712704] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.721043] env[61273]: DEBUG oslo_vmware.api [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375342, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.855442] env[61273]: DEBUG nova.compute.utils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 777.860028] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 4065eeaebc8444fca908e0d4b6b93bd9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 777.860028] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 14b00aa6cd0b414aa3f75600aa3022e5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 777.860028] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 777.860028] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 777.869144] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4065eeaebc8444fca908e0d4b6b93bd9 [ 777.870761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14b00aa6cd0b414aa3f75600aa3022e5 [ 777.901040] env[61273]: DEBUG nova.policy [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b718b811588443bfbed219e81ebb2c23', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebdbfbef269c4ebcb1c5604a5f93268d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 778.081061] env[61273]: INFO nova.scheduler.client.report [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Deleted allocations for instance c376b161-74f9-405a-bb86-516583a9a76f [ 778.087099] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 9cc479b14191452ca640eaa1acf07cea in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 778.100143] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cc479b14191452ca640eaa1acf07cea [ 778.224261] env[61273]: DEBUG oslo_vmware.api [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375342, 'name': PowerOffVM_Task, 'duration_secs': 0.194236} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.224623] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 778.224912] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 778.225186] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d536dffb-efcd-4bcb-9453-2adec15d5ed8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.249344] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 778.249700] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Deleting contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 778.249953] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Deleting the datastore file [datastore2] ca8a38c7-a81c-407a-9558-3d15e492d9fa {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 778.250264] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ebf24d8-5fa8-435f-9fa5-af5eea5850bd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.252804] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Successfully created port: bba752d6-1653-445d-af20-3f45b1d50fbc {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.258472] env[61273]: DEBUG oslo_vmware.api [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for the task: (returnval){ [ 778.258472] env[61273]: value = "task-375344" [ 778.258472] env[61273]: _type = "Task" [ 778.258472] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.279612] env[61273]: DEBUG oslo_vmware.api [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375344, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.360974] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 778.362909] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 845e3015845e4dfda511aaf9d6fbc660 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 778.406571] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 845e3015845e4dfda511aaf9d6fbc660 [ 778.589257] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3802985d-bbb7-4539-94f4-f7f92ad3a1da tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "c376b161-74f9-405a-bb86-516583a9a76f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.843s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.589905] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 8faa9e07e104477e85f28deb0575fdc4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 778.601317] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8faa9e07e104477e85f28deb0575fdc4 [ 778.706862] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7d9667-fde5-4008-96d8-14524acfd2af {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.714722] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9894be-ad53-451b-b7fb-59bebf0b588f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.747029] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43dd335-9aed-4a3e-bbd6-77de8eb28d5f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.754535] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69bb334-2ee6-46fa-9b64-657183a3067c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.770032] env[61273]: DEBUG nova.compute.provider_tree [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.770565] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 
tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 5af08758b179474599bb91ef1b604c20 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 778.775827] env[61273]: DEBUG oslo_vmware.api [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Task: {'id': task-375344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08908} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.776061] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 778.776249] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Deleted contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 778.776429] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 778.776595] env[61273]: INFO nova.compute.manager [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Took 1.08 seconds to destroy the instance on the hypervisor. [ 778.776863] env[61273]: DEBUG oslo.service.loopingcall [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 778.777053] env[61273]: DEBUG nova.compute.manager [-] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 778.777145] env[61273]: DEBUG nova.network.neutron [-] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 778.779392] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5af08758b179474599bb91ef1b604c20 [ 778.795006] env[61273]: DEBUG nova.network.neutron [-] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 778.795552] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0f640a5dba0b46dd9bfdf41e0e42cc0f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 778.806611] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f640a5dba0b46dd9bfdf41e0e42cc0f [ 778.868022] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 24bfe44b94cb451ca6bd20327d7a7355 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 778.932633] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24bfe44b94cb451ca6bd20327d7a7355 [ 779.058509] env[61273]: DEBUG nova.compute.manager [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Received event network-changed-bba752d6-1653-445d-af20-3f45b1d50fbc {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 779.058711] env[61273]: DEBUG nova.compute.manager [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Refreshing instance network info cache due to event network-changed-bba752d6-1653-445d-af20-3f45b1d50fbc. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 779.062079] env[61273]: DEBUG oslo_concurrency.lockutils [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] Acquiring lock "refresh_cache-e62c0b97-cfa7-4acf-bdc5-93d6996c7806" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.062079] env[61273]: DEBUG oslo_concurrency.lockutils [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] Acquired lock "refresh_cache-e62c0b97-cfa7-4acf-bdc5-93d6996c7806" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.062079] env[61273]: DEBUG nova.network.neutron [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Refreshing network info cache for port bba752d6-1653-445d-af20-3f45b1d50fbc {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 779.062079] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] Expecting reply to msg 8240dfe62ba54b3fbe452b15e06830b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 779.066372] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8240dfe62ba54b3fbe452b15e06830b1 [ 779.094692] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 779.096337] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 20c68736894e494b9b40e588cf8e1eb4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 779.131335] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20c68736894e494b9b40e588cf8e1eb4 [ 779.178684] env[61273]: ERROR nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bba752d6-1653-445d-af20-3f45b1d50fbc, please check neutron logs for more information. [ 779.178684] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 779.178684] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 779.178684] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 779.178684] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.178684] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 779.178684] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.178684] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 779.178684] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.178684] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 779.178684] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.178684] env[61273]: ERROR nova.compute.manager raise self.value [ 779.178684] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.178684] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 779.178684] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.178684] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 779.179080] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.179080] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 779.179080] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bba752d6-1653-445d-af20-3f45b1d50fbc, please check neutron logs for more information. 
[ 779.179080] env[61273]: ERROR nova.compute.manager [ 779.179080] env[61273]: Traceback (most recent call last): [ 779.179080] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 779.179080] env[61273]: listener.cb(fileno) [ 779.179080] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 779.179080] env[61273]: result = function(*args, **kwargs) [ 779.179080] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 779.179080] env[61273]: return func(*args, **kwargs) [ 779.179080] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 779.179080] env[61273]: raise e [ 779.179080] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 779.179080] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 779.179080] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.179080] env[61273]: created_port_ids = self._update_ports_for_instance( [ 779.179080] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.179080] env[61273]: with excutils.save_and_reraise_exception(): [ 779.179080] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.179080] env[61273]: self.force_reraise() [ 779.179080] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.179080] env[61273]: raise self.value [ 779.179080] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.179080] env[61273]: updated_port = self._update_port( [ 779.179080] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.179080] env[61273]: _ensure_no_port_binding_failure(port) [ 779.179080] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.179080] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 779.179843] env[61273]: nova.exception.PortBindingFailed: Binding failed for port bba752d6-1653-445d-af20-3f45b1d50fbc, please check neutron logs for more information. 
[ 779.179843] env[61273]: Removing descriptor: 15 [ 779.274062] env[61273]: DEBUG nova.scheduler.client.report [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 779.276637] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 56e6fd79e9f7409eb3c6f260abadbf02 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 779.291104] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56e6fd79e9f7409eb3c6f260abadbf02 [ 779.298228] env[61273]: DEBUG nova.network.neutron [-] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.298613] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7bcf4dce2f3f4cc38cb9f3303a02f7a9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 779.308453] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bcf4dce2f3f4cc38cb9f3303a02f7a9 [ 779.370855] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 779.391344] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 779.391579] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 779.391758] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.391944] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 779.392120] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.392268] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 779.392463] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 779.392611] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 779.392765] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 779.392915] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 779.393078] env[61273]: DEBUG nova.virt.hardware [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 779.393954] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cca53e-593b-4a4c-ade7-81dc287861b8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.401960] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ecef77-7807-4a4a-8e3e-f59b221a429c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.417461] env[61273]: ERROR nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bba752d6-1653-445d-af20-3f45b1d50fbc, please check neutron logs for more information. 
[ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Traceback (most recent call last): [ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] yield resources [ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self.driver.spawn(context, instance, image_meta, [ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self._vmops.spawn(context, instance, image_meta, injected_files, [ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] vm_ref = self.build_virtual_machine(instance, [ 779.417461] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] vif_infos = vmwarevif.get_vif_info(self._session, [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] for vif in network_info: [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] return self._sync_wrapper(fn, *args, **kwargs) [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self.wait() [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self[:] = self._gt.wait() [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] return self._exit_event.wait() [ 779.417794] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 779.417794] env[61273]: ERROR 
nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] current.throw(*self._exc) [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] result = function(*args, **kwargs) [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] return func(*args, **kwargs) [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] raise e [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] nwinfo = self.network_api.allocate_for_instance( [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] created_port_ids = self._update_ports_for_instance( [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] with excutils.save_and_reraise_exception(): [ 779.418190] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self.force_reraise() [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] raise self.value [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] updated_port = self._update_port( [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] _ensure_no_port_binding_failure(port) [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] raise exception.PortBindingFailed(port_id=port['id']) [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] nova.exception.PortBindingFailed: Binding failed for port bba752d6-1653-445d-af20-3f45b1d50fbc, please check neutron logs for more information. [ 779.418482] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] [ 779.418482] env[61273]: INFO nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Terminating instance [ 779.419760] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Acquiring lock "refresh_cache-e62c0b97-cfa7-4acf-bdc5-93d6996c7806" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.578812] env[61273]: DEBUG nova.network.neutron [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 779.614647] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.657031] env[61273]: DEBUG nova.network.neutron [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.657568] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] Expecting reply to msg 85f73bfb0f834de9aa09544e74a1f5f2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 779.666948] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85f73bfb0f834de9aa09544e74a1f5f2 [ 779.779527] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.780189] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 779.782955] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg f5ad3ff1aa3641dda7f9adf2b9651500 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 779.783740] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.149s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.785543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg bf465ce7a23b42d7816a407bcab9fb74 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 779.800502] env[61273]: INFO nova.compute.manager [-] [instance: ca8a38c7-a81c-407a-9558-3d15e492d9fa] Took 1.02 seconds to deallocate network for instance. [ 779.806137] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg f4d37f6832024606b5c741a6370e7df7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 779.834215] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5ad3ff1aa3641dda7f9adf2b9651500 [ 779.837612] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf465ce7a23b42d7816a407bcab9fb74 [ 779.864562] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4d37f6832024606b5c741a6370e7df7 [ 780.086225] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "9952d347-2ca7-48f2-8ee1-dc1d767402dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.086462] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "9952d347-2ca7-48f2-8ee1-dc1d767402dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.160213] env[61273]: DEBUG oslo_concurrency.lockutils [req-2ba743b0-a659-4fa9-8242-55118220eaf8 req-83bad84e-b0dc-4183-9cfd-e77d0eeb5026 service nova] Releasing lock "refresh_cache-e62c0b97-cfa7-4acf-bdc5-93d6996c7806" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.160597] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Acquired lock 
"refresh_cache-e62c0b97-cfa7-4acf-bdc5-93d6996c7806" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.160783] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 780.161222] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 233ccc6be7274eb495979c3ed4ef268c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 780.168569] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 233ccc6be7274eb495979c3ed4ef268c [ 780.288872] env[61273]: DEBUG nova.compute.utils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 780.289534] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 0989a85d08dc4d5e9315252af62a393e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 780.293599] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 780.293761] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 780.301513] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0989a85d08dc4d5e9315252af62a393e [ 780.308730] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.351663] env[61273]: DEBUG nova.policy [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2aafa17161df48feb736cef1e2ab52fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6946d58a1af45eb96597fef8e0c62f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 780.593900] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbbaf59-6719-4bd4-b0cc-de059aa6bea7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.601775] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f32976d-2d08-484f-a605-618863a4e4fc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.334900] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 781.336556] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 691aac53d4674b8ba0bfb10434156cb0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 781.339810] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Successfully created port: e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.341751] env[61273]: DEBUG nova.compute.manager [req-d78ed2f1-4311-4c1a-a9e2-cb31a48926b8 req-cfc9518e-fdf1-4396-9015-7569386984e0 service nova] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Received event network-vif-deleted-bba752d6-1653-445d-af20-3f45b1d50fbc {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 781.342518] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3998ac09-a409-4ec4-b6c0-42f3a9c424fa {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.350923] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949e76db-b283-4cfc-97ec-86e7c064bb44 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.365913] env[61273]: DEBUG nova.compute.provider_tree [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.366445] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 5b2c95b90ea842bfa6c7936255b6b6c6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 781.367828] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.375822] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 691aac53d4674b8ba0bfb10434156cb0 [ 781.376368] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b2c95b90ea842bfa6c7936255b6b6c6 [ 781.432914] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.433411] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 3a50f7b5b6284789874e890c5cc31b22 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 781.440996] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a50f7b5b6284789874e890c5cc31b22 [ 781.584101] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Successfully created port: f38b658e-86ba-4918-a9da-40d3cb38c4cc {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.820740] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Successfully created port: d56e6aa6-5ce3-4942-962f-01a580a6620b {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.846799] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 263922e6453744d68533bbe01242001c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 781.871064] env[61273]: DEBUG nova.scheduler.client.report [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 781.873521] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 1aed2a02b9cc47319c479dbd69374a73 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 781.881508] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 263922e6453744d68533bbe01242001c [ 781.884569] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1aed2a02b9cc47319c479dbd69374a73 [ 781.936271] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Releasing lock "refresh_cache-e62c0b97-cfa7-4acf-bdc5-93d6996c7806" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.936655] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 781.936841] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 781.937217] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be673690-40d8-4593-95a2-87b27be8fd90 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.946628] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b380ec33-de23-4626-a192-ba428c741178 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.968603] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e62c0b97-cfa7-4acf-bdc5-93d6996c7806 could not be found. [ 781.968936] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 781.969120] env[61273]: INFO nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Took 0.03 seconds to destroy the instance on the hypervisor. [ 781.969397] env[61273]: DEBUG oslo.service.loopingcall [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 781.969610] env[61273]: DEBUG nova.compute.manager [-] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 781.969679] env[61273]: DEBUG nova.network.neutron [-] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 781.990895] env[61273]: DEBUG nova.network.neutron [-] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.991439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 198c0e39722a46b3a75b9c330442d9d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 781.998858] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 198c0e39722a46b3a75b9c330442d9d1 [ 782.358390] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 782.378086] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.594s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.378728] env[61273]: ERROR nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b17ade9b-9979-4941-8442-f0ef91d65a14, please check neutron logs for more information. 
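Every build failure in this burst bottoms out in nova/network/neutron.py:294, _ensure_no_port_binding_failure, which raises PortBindingFailed for the port Neutron handed back. The following is a minimal, self-contained sketch of that kind of guard, not Nova's actual implementation; the exception class and the port-dict layout are assumptions inferred from the traceback that follows.

    # Hypothetical stand-ins for nova.exception.PortBindingFailed and the
    # guard traced above: a port whose binding came back as 'binding_failed'
    # is unusable, so the caller fails fast instead of handing it to the
    # virt driver.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # 'binding:vif_type' is the Neutron port attribute reporting the
        # bound VIF type; 'binding_failed' means no mechanism driver could
        # bind the port on this host.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example mirroring the log entry above:
    try:
        ensure_no_port_binding_failure(
            {'id': 'b17ade9b-9979-4941-8442-f0ef91d65a14',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)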
[ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Traceback (most recent call last): [ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self.driver.spawn(context, instance, image_meta, [ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] vm_ref = self.build_virtual_machine(instance, [ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] vif_infos = vmwarevif.get_vif_info(self._session, [ 782.378728] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] for vif in network_info: [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] return self._sync_wrapper(fn, *args, **kwargs) [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self.wait() [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self[:] = self._gt.wait() [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] return self._exit_event.wait() [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] result = hub.switch() [ 782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
782.379149] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] return self.greenlet.switch() [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] result = function(*args, **kwargs) [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] return func(*args, **kwargs) [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] raise e [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] nwinfo = self.network_api.allocate_for_instance( [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] created_port_ids = self._update_ports_for_instance( [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] with excutils.save_and_reraise_exception(): [ 782.379638] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] self.force_reraise() [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] raise self.value [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] updated_port = self._update_port( [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] _ensure_no_port_binding_failure(port) [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] raise exception.PortBindingFailed(port_id=port['id']) [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] nova.exception.PortBindingFailed: Binding failed for port b17ade9b-9979-4941-8442-f0ef91d65a14, please check neutron logs for more information. [ 782.380119] env[61273]: ERROR nova.compute.manager [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] [ 782.380508] env[61273]: DEBUG nova.compute.utils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Binding failed for port b17ade9b-9979-4941-8442-f0ef91d65a14, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 782.382273] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.862s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.384130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 8f14f3e50b5b41e48f2e759d23411bee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 782.385926] env[61273]: DEBUG nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Build of instance 2cd160c6-98ac-44a7-831e-d0fa3a958b99 was re-scheduled: Binding failed for port b17ade9b-9979-4941-8442-f0ef91d65a14, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 782.386372] env[61273]: DEBUG nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 782.386595] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Acquiring lock "refresh_cache-2cd160c6-98ac-44a7-831e-d0fa3a958b99" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.386739] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Acquired lock "refresh_cache-2cd160c6-98ac-44a7-831e-d0fa3a958b99" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.386896] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 782.387286] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 458d09acca264d2096ab3c63fca63bbb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 782.396875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 458d09acca264d2096ab3c63fca63bbb [ 782.399414] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 782.399724] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 782.399818] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 
tempest-ServersTestMultiNic-1091074743-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.400012] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 782.400192] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.400355] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 782.400560] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 782.400753] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 782.400978] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 782.401176] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 782.401353] env[61273]: DEBUG nova.virt.hardware [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 782.402441] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0ac7dc-2e2b-40fa-bf09-1240918c8c4f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.417426] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb0e0d2-bb44-468e-a5aa-dd8a0450e45a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.420908] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f14f3e50b5b41e48f2e759d23411bee [ 782.495278] 
env[61273]: DEBUG nova.network.neutron [-] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.495756] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6ac1906170e44eada69cd9f5761d829f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 782.503863] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ac1906170e44eada69cd9f5761d829f [ 782.576100] env[61273]: ERROR nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2, please check neutron logs for more information. [ 782.576100] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 782.576100] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.576100] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 782.576100] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 782.576100] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 782.576100] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 782.576100] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 782.576100] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.576100] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 782.576100] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.576100] env[61273]: ERROR nova.compute.manager raise self.value [ 782.576100] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 782.576100] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 782.576100] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.576100] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 782.576541] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 782.576541] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 782.576541] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2, please check neutron logs for more information. 
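The bare traceback above, rooted in eventlet/hubs/poll.py, is the same failure surfacing from the background greenthread that _allocate_network_async runs in: the allocation error is raised when the greenthread dies and is re-raised again once the build path iterates the network_info wrapper inside driver.spawn(). A rough sketch of that deferred-failure pattern using eventlet directly; the class and function names here are illustrative, not Nova's.

    import eventlet

    def allocate_ports():
        # stand-in for network_api.allocate_for_instance(); always fails here
        raise RuntimeError("Binding failed for port <id>")

    class AsyncNetworkInfo:
        """Toy version of the async network-info wrapper pattern."""

        def __init__(self):
            # allocation starts immediately, in a background greenthread
            self._gt = eventlet.spawn(allocate_ports)

        def wait(self):
            # GreenThread.wait() re-raises whatever allocate_ports raised,
            # which is why the failure only shows up later, when the
            # network info is first consumed.
            return self._gt.wait()

    nwinfo = AsyncNetworkInfo()
    try:
        nwinfo.wait()   # corresponds to network_info.wait() in get_vif_info()
    except RuntimeError as exc:
        print(f"deferred allocation failure: {exc}")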
[ 782.576541] env[61273]: ERROR nova.compute.manager [ 782.576541] env[61273]: Traceback (most recent call last): [ 782.576541] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 782.576541] env[61273]: listener.cb(fileno) [ 782.576541] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 782.576541] env[61273]: result = function(*args, **kwargs) [ 782.576541] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 782.576541] env[61273]: return func(*args, **kwargs) [ 782.576541] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 782.576541] env[61273]: raise e [ 782.576541] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.576541] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 782.576541] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 782.576541] env[61273]: created_port_ids = self._update_ports_for_instance( [ 782.576541] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 782.576541] env[61273]: with excutils.save_and_reraise_exception(): [ 782.576541] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.576541] env[61273]: self.force_reraise() [ 782.576541] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.576541] env[61273]: raise self.value [ 782.576541] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 782.576541] env[61273]: updated_port = self._update_port( [ 782.576541] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.576541] env[61273]: _ensure_no_port_binding_failure(port) [ 782.576541] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 782.576541] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 782.577302] env[61273]: nova.exception.PortBindingFailed: Binding failed for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2, please check neutron logs for more information. [ 782.577302] env[61273]: Removing descriptor: 15 [ 782.577302] env[61273]: ERROR nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2, please check neutron logs for more information. 
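A few entries earlier, the nova.virt.hardware lines ("Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") trace the CPU-topology selection for the m1.nano flavor. Below is a rough sketch of that enumeration, simplified from what hardware.py actually does (limit merging and preference sorting are omitted).

    # Enumerate every (sockets, cores, threads) combination whose product
    # equals the vCPU count and that stays within the given limits. With
    # 1 vCPU and the default 65536:65536:65536 limits, the only candidate
    # is 1:1:1, which is the single topology the log reports.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)]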
[ 782.577302] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Traceback (most recent call last): [ 782.577302] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 782.577302] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] yield resources [ 782.577302] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 782.577302] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self.driver.spawn(context, instance, image_meta, [ 782.577302] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 782.577302] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.577302] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 782.577302] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] vm_ref = self.build_virtual_machine(instance, [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] vif_infos = vmwarevif.get_vif_info(self._session, [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] for vif in network_info: [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] return self._sync_wrapper(fn, *args, **kwargs) [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self.wait() [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self[:] = self._gt.wait() [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] return self._exit_event.wait() [ 782.577626] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 782.578159] env[61273]: ERROR 
nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] result = hub.switch() [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] return self.greenlet.switch() [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] result = function(*args, **kwargs) [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] return func(*args, **kwargs) [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] raise e [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] nwinfo = self.network_api.allocate_for_instance( [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 782.578159] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] created_port_ids = self._update_ports_for_instance( [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] with excutils.save_and_reraise_exception(): [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self.force_reraise() [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] raise self.value [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] updated_port = self._update_port( [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.578650] 
env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] _ensure_no_port_binding_failure(port) [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 782.578650] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] raise exception.PortBindingFailed(port_id=port['id']) [ 782.578982] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] nova.exception.PortBindingFailed: Binding failed for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2, please check neutron logs for more information. [ 782.578982] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] [ 782.578982] env[61273]: INFO nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Terminating instance [ 782.579260] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "refresh_cache-7bfdc548-4f10-4525-9ea1-3781f90ca81d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.579415] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquired lock "refresh_cache-7bfdc548-4f10-4525-9ea1-3781f90ca81d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.579578] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 782.579988] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg a61e146049d34c94850e09de94302eb8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 782.610503] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a61e146049d34c94850e09de94302eb8 [ 782.914413] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 782.994289] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.994742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 61e827dc031b4f21966f5da544fa5c41 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 782.997345] env[61273]: INFO nova.compute.manager [-] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Took 1.03 seconds to deallocate network for instance. [ 782.999486] env[61273]: DEBUG nova.compute.claims [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 782.999747] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.003924] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61e827dc031b4f21966f5da544fa5c41 [ 783.126244] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 783.204048] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.204656] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg d79f88364262401b925becbe260872ed in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 783.213133] env[61273]: DEBUG nova.compute.manager [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Received event network-changed-e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 783.213330] env[61273]: DEBUG nova.compute.manager [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Refreshing instance network info cache due to event network-changed-e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 783.213570] env[61273]: DEBUG oslo_concurrency.lockutils [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] Acquiring lock "refresh_cache-7bfdc548-4f10-4525-9ea1-3781f90ca81d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.214018] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d79f88364262401b925becbe260872ed [ 783.231945] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ec16a9-0bd8-4de0-bcb8-08b528085d76 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.239859] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683301c1-f3cb-47c8-ba1c-85c8fcbd0e7c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.272736] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60b5b49-74cc-4523-927c-1ecc954280e2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.280029] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8091fe8d-5177-4bcf-a001-b545733e7213 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.293182] env[61273]: DEBUG nova.compute.provider_tree [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.293748] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 5a20faa469664226b415821aa342a583 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 783.309022] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a20faa469664226b415821aa342a583 [ 783.497608] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Releasing lock "refresh_cache-2cd160c6-98ac-44a7-831e-d0fa3a958b99" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.498103] env[61273]: DEBUG nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 783.498421] env[61273]: DEBUG nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.498664] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.515471] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 783.516117] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 8640412abc8c495d841b54393b7e985f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 783.523626] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8640412abc8c495d841b54393b7e985f [ 783.706554] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Releasing lock "refresh_cache-7bfdc548-4f10-4525-9ea1-3781f90ca81d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.707096] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 783.707522] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 783.707967] env[61273]: DEBUG oslo_concurrency.lockutils [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] Acquired lock "refresh_cache-7bfdc548-4f10-4525-9ea1-3781f90ca81d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.708215] env[61273]: DEBUG nova.network.neutron [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Refreshing network info cache for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 783.708689] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] Expecting reply to msg 0d905d9b04504d9e9e2e30b867815a69 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 783.709907] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7326e10-fc8d-43fe-addd-ec2e4d6281b9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.719162] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900a751f-122f-4070-ab39-5e1da1b3d047 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.731378] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d905d9b04504d9e9e2e30b867815a69 [ 783.742933] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7bfdc548-4f10-4525-9ea1-3781f90ca81d could not be found. [ 783.743156] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 783.743333] env[61273]: INFO nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 783.743622] env[61273]: DEBUG oslo.service.loopingcall [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.743890] env[61273]: DEBUG nova.compute.manager [-] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.743988] env[61273]: DEBUG nova.network.neutron [-] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.796020] env[61273]: DEBUG nova.scheduler.client.report [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 783.798403] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 12baa19807b24157b06ef8e9b6a2a8c7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 783.805183] env[61273]: DEBUG nova.network.neutron [-] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 783.812904] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12baa19807b24157b06ef8e9b6a2a8c7 [ 784.018177] env[61273]: DEBUG nova.network.neutron [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.018721] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 76bb8008e707464199467d8171bd858a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 784.027760] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76bb8008e707464199467d8171bd858a [ 784.110726] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg da705235ff8f448e92dcd77afa426b8b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 784.117214] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da705235ff8f448e92dcd77afa426b8b [ 784.228445] env[61273]: DEBUG nova.network.neutron [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 784.296963] env[61273]: DEBUG nova.network.neutron [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.297580] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] Expecting reply to msg 1667d689a34146aabf321dafe007fb1e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 784.300549] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.918s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.301149] env[61273]: ERROR nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6d97bb17-557c-40e8-91c4-7a67086718c1, please check neutron logs for more information. [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Traceback (most recent call last): [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self.driver.spawn(context, instance, image_meta, [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] vm_ref = self.build_virtual_machine(instance, [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] vif_infos = vmwarevif.get_vif_info(self._session, [ 784.301149] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] for vif in network_info: [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 784.301432] env[61273]: ERROR 
nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] return self._sync_wrapper(fn, *args, **kwargs) [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self.wait() [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self[:] = self._gt.wait() [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] return self._exit_event.wait() [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] result = hub.switch() [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 784.301432] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] return self.greenlet.switch() [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] result = function(*args, **kwargs) [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] return func(*args, **kwargs) [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] raise e [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] nwinfo = self.network_api.allocate_for_instance( [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] created_port_ids = self._update_ports_for_instance( [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 784.301755] env[61273]: ERROR nova.compute.manager 
[instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] with excutils.save_and_reraise_exception(): [ 784.301755] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] self.force_reraise() [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] raise self.value [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] updated_port = self._update_port( [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] _ensure_no_port_binding_failure(port) [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] raise exception.PortBindingFailed(port_id=port['id']) [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] nova.exception.PortBindingFailed: Binding failed for port 6d97bb17-557c-40e8-91c4-7a67086718c1, please check neutron logs for more information. [ 784.302045] env[61273]: ERROR nova.compute.manager [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] [ 784.302295] env[61273]: DEBUG nova.compute.utils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Binding failed for port 6d97bb17-557c-40e8-91c4-7a67086718c1, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 784.303357] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Build of instance 82f77423-cee6-4a04-8463-cabe57cba9cf was re-scheduled: Binding failed for port 6d97bb17-557c-40e8-91c4-7a67086718c1, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 784.303815] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 784.303997] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "refresh_cache-82f77423-cee6-4a04-8463-cabe57cba9cf" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.304167] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquired lock "refresh_cache-82f77423-cee6-4a04-8463-cabe57cba9cf" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.304327] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 784.304700] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 857b8e057abf4063929b6d6a2c5e5e58 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 784.309321] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1667d689a34146aabf321dafe007fb1e [ 784.309321] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.632s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.309321] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 7076f91fa7bb43bdbe64329260b0ccfb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 784.311625] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 857b8e057abf4063929b6d6a2c5e5e58 [ 784.362960] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7076f91fa7bb43bdbe64329260b0ccfb [ 784.521403] env[61273]: INFO nova.compute.manager [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] [instance: 2cd160c6-98ac-44a7-831e-d0fa3a958b99] Took 1.02 seconds to deallocate network for instance. 
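
The PortBindingFailed traceback recorded above bottoms out in nova.network.neutron._update_port -> _ensure_no_port_binding_failure, which raises exception.PortBindingFailed(port_id=port['id']). As a rough illustration of that guard, here is a minimal, self-contained Python sketch; the 'binding:vif_type' check, the VIF_TYPE_BINDING_FAILED sentinel, and the example port dict are assumptions made for this sketch, not code taken from this log or from the Nova tree.

# Minimal sketch of the port-binding guard named in the tracebacks above.
# The check on 'binding:vif_type' is an assumption of this sketch.

VIF_TYPE_BINDING_FAILED = 'binding_failed'   # assumed sentinel value


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # Neutron reports the outcome of port binding in 'binding:vif_type';
    # if binding failed, spawning the VM is pointless, so fail fast.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    # Hypothetical port dict shaped like a Neutron API response.
    port = {'id': '6d97bb17-557c-40e8-91c4-7a67086718c1',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)   # mirrors the error text recorded in the log above
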
[ 784.523205] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg f0343495c97e47849d5b0a85a8912929 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 784.555760] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0343495c97e47849d5b0a85a8912929 [ 784.612679] env[61273]: DEBUG nova.network.neutron [-] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.613129] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a78717d6d6a6448eb8d9b232208a5955 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 784.621092] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a78717d6d6a6448eb8d9b232208a5955 [ 784.800673] env[61273]: DEBUG oslo_concurrency.lockutils [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] Releasing lock "refresh_cache-7bfdc548-4f10-4525-9ea1-3781f90ca81d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.800963] env[61273]: DEBUG nova.compute.manager [req-da29fbf1-e65d-4790-b7e9-578313229c91 req-9eb9d572-b33c-4f59-b95f-87708e188127 service nova] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Received event network-vif-deleted-e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 784.828380] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 784.915223] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.915821] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg af1a117943ba462faeb8f74000bd3fd2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 784.925087] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af1a117943ba462faeb8f74000bd3fd2 [ 785.027503] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg 0d17496158a94f29aaae845b99d6cdfa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 785.061863] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d17496158a94f29aaae845b99d6cdfa [ 785.115385] env[61273]: INFO nova.compute.manager [-] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Took 1.37 seconds to deallocate network for instance. 
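
The "Inventory has not changed for provider 4b2a9d85-..." entries above report the resource-provider inventory that the compute_resources claims in this section are made against. As a worked example of the capacity those numbers imply, the sketch below applies the conventional Placement-style formula capacity = (total - reserved) * allocation_ratio; the formula is an assumption of this illustration, not code extracted from the log.

# Worked example over the provider inventory recorded in the log above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for resource_class, inv in inventory.items():
    # Assumed Placement-style capacity arithmetic.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{resource_class}: schedulable capacity = {capacity:g}")

# Expected output:
# VCPU: schedulable capacity = 192
# MEMORY_MB: schedulable capacity = 196078
# DISK_GB: schedulable capacity = 400
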
[ 785.118444] env[61273]: DEBUG nova.compute.claims [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 785.118632] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.133855] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a97ad3e-99c3-48b9-93ea-f2525ee0e4bb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.141193] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e8d9a9-69df-4846-8b45-26e8ced3f170 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.170140] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5d8371-9d89-4116-986c-5b631d84c1c2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.177146] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81a4b8a-feae-4828-a284-d43afe497484 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.189823] env[61273]: DEBUG nova.compute.provider_tree [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.190378] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 4549d01a5ec9435dbd85e2da7deab5bd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 785.197907] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4549d01a5ec9435dbd85e2da7deab5bd [ 785.418063] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Releasing lock "refresh_cache-82f77423-cee6-4a04-8463-cabe57cba9cf" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.418389] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 785.418640] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 785.418850] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 785.442976] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 785.443617] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 99805b855930478881b3d61b88380397 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 785.453106] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99805b855930478881b3d61b88380397 [ 785.549407] env[61273]: INFO nova.scheduler.client.report [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Deleted allocations for instance 2cd160c6-98ac-44a7-831e-d0fa3a958b99 [ 785.563444] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Expecting reply to msg d0289ea3d54646988fadfc8e9b55b9f5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 785.568830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0289ea3d54646988fadfc8e9b55b9f5 [ 785.693444] env[61273]: DEBUG nova.scheduler.client.report [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 785.695959] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg c6f7ca082a334e4b9eeb6a375c27df3c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 785.708095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
c6f7ca082a334e4b9eeb6a375c27df3c [ 785.945678] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.946282] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg ab4ac6905ebd4000a7df98f2bea3516a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 785.955564] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab4ac6905ebd4000a7df98f2bea3516a [ 786.058600] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e9427d7b-83ba-4396-a2f6-148e7d395fb2 tempest-ServerMetadataTestJSON-473901767 tempest-ServerMetadataTestJSON-473901767-project-member] Lock "2cd160c6-98ac-44a7-831e-d0fa3a958b99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.856s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.058600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg ecacc0635fa54bc89a71495ae2e4f99c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 786.067320] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecacc0635fa54bc89a71495ae2e4f99c [ 786.198999] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.892s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.199768] env[61273]: ERROR nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 111bf606-b67a-4d6a-8de1-a66912dc3f30, please check neutron logs for more information. 
[ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Traceback (most recent call last): [ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self.driver.spawn(context, instance, image_meta, [ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] vm_ref = self.build_virtual_machine(instance, [ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.199768] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] for vif in network_info: [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] return self._sync_wrapper(fn, *args, **kwargs) [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self.wait() [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self[:] = self._gt.wait() [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] return self._exit_event.wait() [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] result = hub.switch() [ 786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
786.200139] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] return self.greenlet.switch() [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] result = function(*args, **kwargs) [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] return func(*args, **kwargs) [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] raise e [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] nwinfo = self.network_api.allocate_for_instance( [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] created_port_ids = self._update_ports_for_instance( [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] with excutils.save_and_reraise_exception(): [ 786.200441] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] self.force_reraise() [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] raise self.value [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] updated_port = self._update_port( [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] _ensure_no_port_binding_failure(port) [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] raise exception.PortBindingFailed(port_id=port['id']) [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] nova.exception.PortBindingFailed: Binding failed for port 111bf606-b67a-4d6a-8de1-a66912dc3f30, please check neutron logs for more information. [ 786.200734] env[61273]: ERROR nova.compute.manager [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] [ 786.200982] env[61273]: DEBUG nova.compute.utils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Binding failed for port 111bf606-b67a-4d6a-8de1-a66912dc3f30, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 786.201773] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.452s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.203533] env[61273]: INFO nova.compute.claims [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.205103] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 14b08769d11b4fe39f5f908b2e71b69e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 786.206967] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Build of instance a0a40c68-77e2-4152-ac2e-059f8f7a8f78 was re-scheduled: Binding failed for port 111bf606-b67a-4d6a-8de1-a66912dc3f30, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 786.207389] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 786.207606] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "refresh_cache-a0a40c68-77e2-4152-ac2e-059f8f7a8f78" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.207758] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquired lock "refresh_cache-a0a40c68-77e2-4152-ac2e-059f8f7a8f78" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.207912] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 786.208296] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg dcf18ad26bee4c2e9042d2ac9227f659 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 786.216458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcf18ad26bee4c2e9042d2ac9227f659 [ 786.240116] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14b08769d11b4fe39f5f908b2e71b69e [ 786.448334] env[61273]: INFO nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 82f77423-cee6-4a04-8463-cabe57cba9cf] Took 1.03 seconds to deallocate network for instance. [ 786.450021] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 63b5b510042c404a9f909ef5c4b16aff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 786.484767] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63b5b510042c404a9f909ef5c4b16aff [ 786.560689] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 786.562413] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 4f6a1034d9024923a60d22a0b350be0c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 786.594183] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f6a1034d9024923a60d22a0b350be0c [ 786.711871] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 0dea22ea318841f48e7583b65ec272a7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 786.719462] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0dea22ea318841f48e7583b65ec272a7 [ 786.736788] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.850585] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.851238] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg a7a8127c52e444eeb9e8f046dc417789 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 786.859531] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7a8127c52e444eeb9e8f046dc417789 [ 786.955509] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg a5bf8eaff97b490e918eea4594e299b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 786.994416] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5bf8eaff97b490e918eea4594e299b0 [ 787.083415] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.353246] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Releasing lock "refresh_cache-a0a40c68-77e2-4152-ac2e-059f8f7a8f78" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.353478] env[61273]: DEBUG nova.compute.manager [None 
req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 787.353651] env[61273]: DEBUG nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 787.353810] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 787.373143] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.373821] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 2a25ef9d28fa4e16afa744c79ed4bf81 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 787.386594] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a25ef9d28fa4e16afa744c79ed4bf81 [ 787.484947] env[61273]: INFO nova.scheduler.client.report [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Deleted allocations for instance 82f77423-cee6-4a04-8463-cabe57cba9cf [ 787.493391] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 6b2390d880b04ef9a336dbe6d71bfa8a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 787.514146] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b2390d880b04ef9a336dbe6d71bfa8a [ 787.579555] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3168224-ddfd-4a28-8e1f-f1c7a6297638 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.588016] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a896471f-6ba8-499d-bbec-38376318c675 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.617284] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d2ef88-b336-4816-a9b3-f36735658076 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.624358] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5ba4ff36-8e14-4116-8118-dd561d8c8d3b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.637779] env[61273]: DEBUG nova.compute.provider_tree [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.638272] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 0869cfa187344a67a5ace00934e17e10 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 787.645691] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0869cfa187344a67a5ace00934e17e10 [ 787.880486] env[61273]: DEBUG nova.network.neutron [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.881122] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg f88a5451c6bf45659f983bb90e81036d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 787.889169] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f88a5451c6bf45659f983bb90e81036d [ 787.995089] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "82f77423-cee6-4a04-8463-cabe57cba9cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.091s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.995734] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 6242f8962cde4a9dab18e8192f644afb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 788.007647] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6242f8962cde4a9dab18e8192f644afb [ 788.140838] env[61273]: DEBUG nova.scheduler.client.report [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 788.143223] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 472fa1a9e05c4816ae50fa88131de8cd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 788.154323] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 472fa1a9e05c4816ae50fa88131de8cd [ 788.384054] env[61273]: INFO nova.compute.manager [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: a0a40c68-77e2-4152-ac2e-059f8f7a8f78] Took 1.03 seconds to deallocate network for instance. [ 788.385907] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 7cce322e6933480bacd5f75b49dbe8d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 788.423067] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cce322e6933480bacd5f75b49dbe8d2 [ 788.498373] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 788.500080] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg fb4c38726fb94ba287fee251c4f3a1ec in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 788.529832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb4c38726fb94ba287fee251c4f3a1ec [ 788.645629] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.646166] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 788.647832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 4945713aef8d4983bcfc0c7df5c15473 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 788.648919] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 25.347s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.649099] env[61273]: DEBUG nova.objects.instance [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] [instance: f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61273) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 788.650759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 4b194e5289c744aeab769beb023a52de in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 788.686803] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b194e5289c744aeab769beb023a52de [ 788.688284] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4945713aef8d4983bcfc0c7df5c15473 [ 788.890444] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 57bc87fadcc242bfa52405235f92c459 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 788.936898] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57bc87fadcc242bfa52405235f92c459 [ 789.017353] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.160442] env[61273]: DEBUG nova.compute.utils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 789.160442] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 46359ccc8a6e46419144892a6c9a2f9c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 789.160442] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 
tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 004682ba513c4bd888109bea00728340 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 789.160442] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 789.160442] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 789.165669] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46359ccc8a6e46419144892a6c9a2f9c [ 789.166767] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 004682ba513c4bd888109bea00728340 [ 789.224237] env[61273]: DEBUG nova.policy [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30aef3fc075c43bdae873c542998ccd0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35609af3eb7c487aa755932083286314', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 789.412638] env[61273]: INFO nova.scheduler.client.report [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Deleted allocations for instance a0a40c68-77e2-4152-ac2e-059f8f7a8f78 [ 789.416705] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg ee6cc2aa4a7945088f6ee93f55e27e8c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 789.434385] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee6cc2aa4a7945088f6ee93f55e27e8c [ 789.660409] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 789.663452] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg a4e40f8f83674c23a96c2d95ef75e723 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 789.665367] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.665819] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f0498efe-d98e-4ced-a3ef-1ecc80fe201a tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 0d2f641bd2cc45ecaf6031f38a72684d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 789.667035] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.084s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.668775] env[61273]: INFO nova.compute.claims [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 789.670476] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 9e11aa5a473e4507b2487e4e19fa42df in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 789.685511] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d2f641bd2cc45ecaf6031f38a72684d [ 789.692805] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Successfully created port: 602900da-fbec-4d44-a5ab-e4570ec93784 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 789.711833] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4e40f8f83674c23a96c2d95ef75e723 [ 789.722914] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e11aa5a473e4507b2487e4e19fa42df [ 789.921987] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b5aa5be3-e859-48c8-9d6b-1df3bb5ee1ab tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "a0a40c68-77e2-4152-ac2e-059f8f7a8f78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 121.991s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.922586] 
env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 05435595de5d4262860669960398e875 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 789.937744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05435595de5d4262860669960398e875 [ 790.169920] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 6180ec445bad43fd9beccc548594dbf1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 790.174999] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg be2994785452496a91ea48c2bbc8bca4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 790.190211] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be2994785452496a91ea48c2bbc8bca4 [ 790.218720] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6180ec445bad43fd9beccc548594dbf1 [ 790.427448] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 790.427448] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 6fb6668a7d9946a4864ed6805a39728d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 790.473544] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fb6668a7d9946a4864ed6805a39728d [ 790.675701] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 790.713949] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 790.714192] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 790.714343] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 790.714519] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 790.714660] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 790.714801] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 790.714999] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 790.715153] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 790.715312] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 790.715470] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 790.715738] env[61273]: DEBUG nova.virt.hardware [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 790.716604] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4053a1d2-a6b2-4323-b631-1bc64dd18ee3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.727250] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e746b680-3ba0-48a6-a0d5-8efd6e198bd0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.958914] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.159179] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7c8275-d6fa-46ba-9bd2-c376cd67cec5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.167012] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2baea15-3720-4d50-8baa-76fd4fd2dc8c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.203439] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4ab2da-2e7e-4d9b-bf4a-26a8a5728c42 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.207087] env[61273]: DEBUG nova.compute.manager [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Received event network-changed-602900da-fbec-4d44-a5ab-e4570ec93784 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 791.207273] env[61273]: DEBUG nova.compute.manager [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Refreshing instance network info cache due to event 
network-changed-602900da-fbec-4d44-a5ab-e4570ec93784. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 791.207485] env[61273]: DEBUG oslo_concurrency.lockutils [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] Acquiring lock "refresh_cache-0b400fe1-d0d0-4820-9f56-56ccbad5465a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.207622] env[61273]: DEBUG oslo_concurrency.lockutils [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] Acquired lock "refresh_cache-0b400fe1-d0d0-4820-9f56-56ccbad5465a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.207774] env[61273]: DEBUG nova.network.neutron [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Refreshing network info cache for port 602900da-fbec-4d44-a5ab-e4570ec93784 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 791.208982] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] Expecting reply to msg 62ca44824d7e416a9990c69dc9f273e9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 791.215459] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f325efb-ff6e-4ac8-9def-adce36e7295f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.220619] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62ca44824d7e416a9990c69dc9f273e9 [ 791.232458] env[61273]: DEBUG nova.compute.provider_tree [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.233083] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 04d43d90508b46be89725212de163616 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 791.239689] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04d43d90508b46be89725212de163616 [ 791.261737] env[61273]: ERROR nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 602900da-fbec-4d44-a5ab-e4570ec93784, please check neutron logs for more information. 
[ 791.261737] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 791.261737] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.261737] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 791.261737] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 791.261737] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 791.261737] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 791.261737] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 791.261737] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.261737] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 791.261737] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.261737] env[61273]: ERROR nova.compute.manager raise self.value [ 791.261737] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 791.261737] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 791.261737] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.261737] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 791.262205] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.262205] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 791.262205] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 602900da-fbec-4d44-a5ab-e4570ec93784, please check neutron logs for more information. 
[ 791.262205] env[61273]: ERROR nova.compute.manager [ 791.262205] env[61273]: Traceback (most recent call last): [ 791.262205] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 791.262205] env[61273]: listener.cb(fileno) [ 791.262205] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 791.262205] env[61273]: result = function(*args, **kwargs) [ 791.262205] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 791.262205] env[61273]: return func(*args, **kwargs) [ 791.262205] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 791.262205] env[61273]: raise e [ 791.262205] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.262205] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 791.262205] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 791.262205] env[61273]: created_port_ids = self._update_ports_for_instance( [ 791.262205] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 791.262205] env[61273]: with excutils.save_and_reraise_exception(): [ 791.262205] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.262205] env[61273]: self.force_reraise() [ 791.262205] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.262205] env[61273]: raise self.value [ 791.262205] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 791.262205] env[61273]: updated_port = self._update_port( [ 791.262205] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.262205] env[61273]: _ensure_no_port_binding_failure(port) [ 791.262205] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.262205] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 791.262931] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 602900da-fbec-4d44-a5ab-e4570ec93784, please check neutron logs for more information. [ 791.262931] env[61273]: Removing descriptor: 15 [ 791.263284] env[61273]: ERROR nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 602900da-fbec-4d44-a5ab-e4570ec93784, please check neutron logs for more information. 
[ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Traceback (most recent call last): [ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] yield resources [ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self.driver.spawn(context, instance, image_meta, [ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] vm_ref = self.build_virtual_machine(instance, [ 791.263284] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] vif_infos = vmwarevif.get_vif_info(self._session, [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] for vif in network_info: [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] return self._sync_wrapper(fn, *args, **kwargs) [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self.wait() [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self[:] = self._gt.wait() [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] return self._exit_event.wait() [ 791.263663] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 791.263663] env[61273]: ERROR 
nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] result = hub.switch() [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] return self.greenlet.switch() [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] result = function(*args, **kwargs) [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] return func(*args, **kwargs) [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] raise e [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] nwinfo = self.network_api.allocate_for_instance( [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] created_port_ids = self._update_ports_for_instance( [ 791.263989] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] with excutils.save_and_reraise_exception(): [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self.force_reraise() [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] raise self.value [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] updated_port = self._update_port( [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.264459] 
env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] _ensure_no_port_binding_failure(port) [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] raise exception.PortBindingFailed(port_id=port['id']) [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] nova.exception.PortBindingFailed: Binding failed for port 602900da-fbec-4d44-a5ab-e4570ec93784, please check neutron logs for more information. [ 791.264459] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] [ 791.264856] env[61273]: INFO nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Terminating instance [ 791.266809] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Acquiring lock "refresh_cache-0b400fe1-d0d0-4820-9f56-56ccbad5465a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.454548] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "1fde207b-9d32-4cff-b3fe-d0caddd20f69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.454773] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "1fde207b-9d32-4cff-b3fe-d0caddd20f69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.484324] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "6182ea84-d5f2-4f01-9091-3d7b0b096d7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.484551] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "6182ea84-d5f2-4f01-9091-3d7b0b096d7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.735240] env[61273]: DEBUG nova.scheduler.client.report [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 
tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 791.737885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 0068fbd3747c4e60a1f78796777723bd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 791.746158] env[61273]: DEBUG nova.network.neutron [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 791.767481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0068fbd3747c4e60a1f78796777723bd [ 791.841832] env[61273]: DEBUG nova.network.neutron [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.842391] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] Expecting reply to msg 53c050bc18964a26a69f6cd9fcc491d4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 791.851675] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53c050bc18964a26a69f6cd9fcc491d4 [ 792.240463] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.240993] env[61273]: DEBUG nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 792.242976] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 79ede060e90b4311aade3a1fd4bc9bf8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 792.244159] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.803s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.247261] env[61273]: INFO nova.compute.claims [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 792.247388] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg cc8ad766f6df4adcb1269594a7c00b50 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 792.278702] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79ede060e90b4311aade3a1fd4bc9bf8 [ 792.290440] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc8ad766f6df4adcb1269594a7c00b50 [ 792.345793] env[61273]: DEBUG oslo_concurrency.lockutils [req-27ee80d0-c0fd-47fe-b146-7403d8286b29 req-7d456daf-ba33-43a2-83c2-fa909ef2662f service nova] Releasing lock "refresh_cache-0b400fe1-d0d0-4820-9f56-56ccbad5465a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.346177] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "27e43d79-6435-46fb-ac71-9be7313d591a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.346423] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "27e43d79-6435-46fb-ac71-9be7313d591a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.346674] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Acquired lock "refresh_cache-0b400fe1-d0d0-4820-9f56-56ccbad5465a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.346904] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 
tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 792.347348] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 42cb92599ee24a0093021cfb2bee3fdf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 792.354488] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42cb92599ee24a0093021cfb2bee3fdf [ 792.752920] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 38696e76b5fe4a4bb2776fb9afe9c96f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 792.755225] env[61273]: DEBUG nova.compute.utils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 792.756143] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 8664741645824d79b72f374c3eaf3b08 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 792.760665] env[61273]: DEBUG nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 792.760665] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 792.761907] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38696e76b5fe4a4bb2776fb9afe9c96f [ 792.769813] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8664741645824d79b72f374c3eaf3b08 [ 792.806988] env[61273]: DEBUG nova.policy [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9aff8d5e22844391837fad86f2cd243a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1463a6a43bbe48f886efed86e8e24482', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 792.865273] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 792.959804] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.960380] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 45383524bc634cba9a5e2825016e8668 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 792.970057] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45383524bc634cba9a5e2825016e8668 [ 793.168755] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Successfully created port: eae1b954-6dd0-4d6c-b829-c5b225294270 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 793.211426] env[61273]: DEBUG nova.compute.manager [req-7e4ce0b7-1651-483a-9431-70e1887f6b4e req-2077d25f-fec9-4469-818c-d8aca04172f8 service nova] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Received event network-vif-deleted-602900da-fbec-4d44-a5ab-e4570ec93784 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 793.262391] env[61273]: DEBUG nova.compute.manager [None 
req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 793.264242] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 9dbc7573ed7343f0a06237c04ee4deba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 793.322830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dbc7573ed7343f0a06237c04ee4deba [ 793.462845] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Releasing lock "refresh_cache-0b400fe1-d0d0-4820-9f56-56ccbad5465a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.463483] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 793.463483] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 793.463926] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19deb3d8-c8ec-4240-be3a-9199436b310a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.474520] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4796095-61f5-47bc-94ec-2d4b411e71e8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.500713] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b400fe1-d0d0-4820-9f56-56ccbad5465a could not be found. 
[ 793.500969] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 793.501304] env[61273]: INFO nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 793.501428] env[61273]: DEBUG oslo.service.loopingcall [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 793.501607] env[61273]: DEBUG nova.compute.manager [-] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 793.501727] env[61273]: DEBUG nova.network.neutron [-] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 793.518772] env[61273]: DEBUG nova.network.neutron [-] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.519251] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 533e58bde4bc419d995dc0162f64f860 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 793.527712] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 533e58bde4bc419d995dc0162f64f860 [ 793.609595] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d144ab1-94ce-4e09-af72-f48c7246df4f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.616572] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ecef23-7894-4d9f-9560-0b934055cf42 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.646500] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59f3908-31c8-480a-8a96-469c2839a95d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.653707] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8768ba4a-2ba3-48c2-97aa-245e7b1b0942 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.666802] env[61273]: DEBUG nova.compute.provider_tree [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.667291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg c7358a5365144548aaccd3ea13ca12e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 793.674746] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7358a5365144548aaccd3ea13ca12e6 [ 793.774766] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 287c12cb53d040c9b777d135e45d53e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 793.807445] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 287c12cb53d040c9b777d135e45d53e3 [ 793.970442] env[61273]: ERROR nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port eae1b954-6dd0-4d6c-b829-c5b225294270, please check neutron logs for more information. [ 793.970442] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 793.970442] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 793.970442] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 793.970442] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 793.970442] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 793.970442] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 793.970442] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 793.970442] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.970442] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 793.970442] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.970442] env[61273]: ERROR nova.compute.manager raise self.value [ 793.970442] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 793.970442] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 793.970442] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.970442] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 793.970907] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 793.970907] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 793.970907] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port eae1b954-6dd0-4d6c-b829-c5b225294270, please check neutron logs for more information. 
[ 793.970907] env[61273]: ERROR nova.compute.manager [ 793.970907] env[61273]: Traceback (most recent call last): [ 793.970907] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 793.970907] env[61273]: listener.cb(fileno) [ 793.970907] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 793.970907] env[61273]: result = function(*args, **kwargs) [ 793.970907] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 793.970907] env[61273]: return func(*args, **kwargs) [ 793.970907] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 793.970907] env[61273]: raise e [ 793.970907] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 793.970907] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 793.970907] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 793.970907] env[61273]: created_port_ids = self._update_ports_for_instance( [ 793.970907] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 793.970907] env[61273]: with excutils.save_and_reraise_exception(): [ 793.970907] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.970907] env[61273]: self.force_reraise() [ 793.970907] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.970907] env[61273]: raise self.value [ 793.970907] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 793.970907] env[61273]: updated_port = self._update_port( [ 793.970907] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.970907] env[61273]: _ensure_no_port_binding_failure(port) [ 793.970907] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 793.970907] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 793.971692] env[61273]: nova.exception.PortBindingFailed: Binding failed for port eae1b954-6dd0-4d6c-b829-c5b225294270, please check neutron logs for more information. 
[ 793.971692] env[61273]: Removing descriptor: 15 [ 794.021373] env[61273]: DEBUG nova.network.neutron [-] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.022134] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6553cf9a93844c15afc38d5311583c28 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 794.033165] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6553cf9a93844c15afc38d5311583c28 [ 794.170368] env[61273]: DEBUG nova.scheduler.client.report [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 794.173132] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 517e109c9af9467bb88b97de53fdaecd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 794.187612] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 517e109c9af9467bb88b97de53fdaecd [ 794.278166] env[61273]: DEBUG nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 794.302857] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 794.303111] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 794.303267] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.303445] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 794.303589] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.303791] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 794.304080] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 794.304277] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 794.304449] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 794.304611] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 794.304782] env[61273]: DEBUG nova.virt.hardware [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 794.305619] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d554a2-adf8-48c8-a529-ecf07982ca1b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.313023] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2e2c8b-6147-4473-86d4-f567a66389be {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.326247] env[61273]: ERROR nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port eae1b954-6dd0-4d6c-b829-c5b225294270, please check neutron logs for more information. 
[ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Traceback (most recent call last): [ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] yield resources [ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self.driver.spawn(context, instance, image_meta, [ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self._vmops.spawn(context, instance, image_meta, injected_files, [ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] vm_ref = self.build_virtual_machine(instance, [ 794.326247] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] vif_infos = vmwarevif.get_vif_info(self._session, [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] for vif in network_info: [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] return self._sync_wrapper(fn, *args, **kwargs) [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self.wait() [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self[:] = self._gt.wait() [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] return self._exit_event.wait() [ 794.326616] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 794.326616] env[61273]: ERROR 
nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] current.throw(*self._exc) [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] result = function(*args, **kwargs) [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] return func(*args, **kwargs) [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] raise e [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] nwinfo = self.network_api.allocate_for_instance( [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] created_port_ids = self._update_ports_for_instance( [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] with excutils.save_and_reraise_exception(): [ 794.326982] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self.force_reraise() [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] raise self.value [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] updated_port = self._update_port( [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] _ensure_no_port_binding_failure(port) [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] raise exception.PortBindingFailed(port_id=port['id']) [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] nova.exception.PortBindingFailed: Binding failed for port eae1b954-6dd0-4d6c-b829-c5b225294270, please check neutron logs for more information. [ 794.327269] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] [ 794.327269] env[61273]: INFO nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Terminating instance [ 794.331030] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "refresh_cache-e6108eed-93b4-40a5-a61b-67aa5bbe2fda" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.331257] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquired lock "refresh_cache-e6108eed-93b4-40a5-a61b-67aa5bbe2fda" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.331477] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 794.331985] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 93422c62e5194a52b6d07e23ce7632ad in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 794.338525] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93422c62e5194a52b6d07e23ce7632ad [ 794.524938] env[61273]: INFO nova.compute.manager [-] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Took 1.02 seconds to deallocate network for instance. 
[ 794.527284] env[61273]: DEBUG nova.compute.claims [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 794.527543] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.676421] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.676984] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 794.678627] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg c1ef4003829449fbb7ddb3c3c736d5d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 794.680643] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.783s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.680643] env[61273]: DEBUG nova.objects.instance [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lazy-loading 'resources' on Instance uuid f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc {{(pid=61273) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 794.680643] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 88173889ba83464fa8fd8e6540397107 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 794.688581] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88173889ba83464fa8fd8e6540397107 [ 794.713893] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1ef4003829449fbb7ddb3c3c736d5d1 [ 794.848051] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Instance cache missing network 
info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 794.922165] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.922671] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 61ad01e3e76e48c4a4d5e6d2f5ac665e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 794.932539] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61ad01e3e76e48c4a4d5e6d2f5ac665e [ 795.186902] env[61273]: DEBUG nova.compute.utils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 795.187521] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 1049918a46da49539b841275d8b2471c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 795.189064] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 795.189064] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 795.199458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1049918a46da49539b841275d8b2471c [ 795.240214] env[61273]: DEBUG nova.policy [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cb53d9709f844fe9b79a83c6fd96729', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3051fa87e7e647c5a3d7bdd0e4c63778', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 795.248149] env[61273]: DEBUG nova.compute.manager [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Received event network-changed-eae1b954-6dd0-4d6c-b829-c5b225294270 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 795.248352] env[61273]: DEBUG nova.compute.manager [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Refreshing instance network info cache due to event network-changed-eae1b954-6dd0-4d6c-b829-c5b225294270. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 795.248538] env[61273]: DEBUG oslo_concurrency.lockutils [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] Acquiring lock "refresh_cache-e6108eed-93b4-40a5-a61b-67aa5bbe2fda" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.425437] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Releasing lock "refresh_cache-e6108eed-93b4-40a5-a61b-67aa5bbe2fda" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.425850] env[61273]: DEBUG nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 795.426059] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 795.426369] env[61273]: DEBUG oslo_concurrency.lockutils [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] Acquired lock "refresh_cache-e6108eed-93b4-40a5-a61b-67aa5bbe2fda" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.426540] env[61273]: DEBUG nova.network.neutron [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Refreshing network info cache for port eae1b954-6dd0-4d6c-b829-c5b225294270 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 795.427071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] Expecting reply to msg 3b789f90e23d409187c52b110822300b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 795.434122] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-892cbe36-d1e3-4eea-8c9f-a048c108d2e3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.434888] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b789f90e23d409187c52b110822300b [ 795.437819] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cff82d-2c1e-42ed-81a6-1b26cea30181 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.459721] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e6108eed-93b4-40a5-a61b-67aa5bbe2fda could not be found. [ 795.459976] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 795.460184] env[61273]: INFO nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Took 0.03 seconds to destroy the instance on the hypervisor. [ 795.460426] env[61273]: DEBUG oslo.service.loopingcall [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 795.460645] env[61273]: DEBUG nova.compute.manager [-] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 795.460738] env[61273]: DEBUG nova.network.neutron [-] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 795.489913] env[61273]: DEBUG nova.network.neutron [-] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.490416] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 58766b92602c4d008caa2a131a2843bb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 795.497757] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58766b92602c4d008caa2a131a2843bb [ 795.510378] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f7ffe9-d558-44f3-a709-ab8e36fc6dc7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.518085] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7c858a-8a28-4bf1-b3b1-78749df73f96 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.547335] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751a8fbe-a0c3-4351-bf21-c2e445ecf8e2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.554702] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f105419-52d1-4626-822a-009f941d77f5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.570967] env[61273]: DEBUG nova.compute.provider_tree [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.571415] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 8d07dff0d32849eca208ba9770221e28 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 795.578877] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d07dff0d32849eca208ba9770221e28 [ 795.692478] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 795.694083] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 237bf281753f49a6b7e8ed7681cb9b8c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 795.715370] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Successfully created port: 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 795.727743] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 237bf281753f49a6b7e8ed7681cb9b8c [ 795.947448] env[61273]: DEBUG nova.network.neutron [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.992646] env[61273]: DEBUG nova.network.neutron [-] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.993080] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fb0e379e657247c6af0d6162513a0dd1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 796.001309] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb0e379e657247c6af0d6162513a0dd1 [ 796.039185] env[61273]: DEBUG nova.network.neutron [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.039757] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] Expecting reply to msg 94a9691874cf4022ab85e353d9dbfb24 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 796.048905] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94a9691874cf4022ab85e353d9dbfb24 [ 796.075290] env[61273]: DEBUG nova.scheduler.client.report [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 796.076671] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg 
19355be1262a4dc6bf3cdf5be581c9e8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 796.088998] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19355be1262a4dc6bf3cdf5be581c9e8 [ 796.198767] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg af341c2e37d843918372c6cef9bd2888 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 796.234393] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af341c2e37d843918372c6cef9bd2888 [ 796.494885] env[61273]: INFO nova.compute.manager [-] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Took 1.03 seconds to deallocate network for instance. [ 796.497358] env[61273]: DEBUG nova.compute.claims [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 796.497672] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.504734] env[61273]: ERROR nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c, please check neutron logs for more information. 
[ 796.504734] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 796.504734] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 796.504734] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 796.504734] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 796.504734] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 796.504734] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 796.504734] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 796.504734] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 796.504734] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 796.504734] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 796.504734] env[61273]: ERROR nova.compute.manager raise self.value [ 796.504734] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 796.504734] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 796.504734] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 796.504734] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 796.505167] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 796.505167] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 796.505167] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c, please check neutron logs for more information. 
[ 796.505167] env[61273]: ERROR nova.compute.manager [ 796.505167] env[61273]: Traceback (most recent call last): [ 796.505167] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 796.505167] env[61273]: listener.cb(fileno) [ 796.505167] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 796.505167] env[61273]: result = function(*args, **kwargs) [ 796.505167] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 796.505167] env[61273]: return func(*args, **kwargs) [ 796.505167] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 796.505167] env[61273]: raise e [ 796.505167] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 796.505167] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 796.505167] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 796.505167] env[61273]: created_port_ids = self._update_ports_for_instance( [ 796.505167] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 796.505167] env[61273]: with excutils.save_and_reraise_exception(): [ 796.505167] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 796.505167] env[61273]: self.force_reraise() [ 796.505167] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 796.505167] env[61273]: raise self.value [ 796.505167] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 796.505167] env[61273]: updated_port = self._update_port( [ 796.505167] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 796.505167] env[61273]: _ensure_no_port_binding_failure(port) [ 796.505167] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 796.505167] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 796.505962] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c, please check neutron logs for more information. 
[ 796.505962] env[61273]: Removing descriptor: 15 [ 796.541859] env[61273]: DEBUG oslo_concurrency.lockutils [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] Releasing lock "refresh_cache-e6108eed-93b4-40a5-a61b-67aa5bbe2fda" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.542184] env[61273]: DEBUG nova.compute.manager [req-29e736cf-97a0-45e7-92f6-89f21aaa18c3 req-149179e8-5eed-4d64-8b45-91bf420f6b95 service nova] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Received event network-vif-deleted-eae1b954-6dd0-4d6c-b829-c5b225294270 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 796.583440] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.903s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.585858] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.257s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.587167] env[61273]: INFO nova.compute.claims [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.588858] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 5c3cadfee63e407db0d75eb8c75c52b6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 796.606746] env[61273]: INFO nova.scheduler.client.report [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Deleted allocations for instance f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc [ 796.609982] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg eec1c6876da941d2b798f49eb9060f32 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 796.628780] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c3cadfee63e407db0d75eb8c75c52b6 [ 796.654739] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eec1c6876da941d2b798f49eb9060f32 [ 796.702326] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 796.728029] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 796.728029] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 796.728029] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.728237] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 796.728237] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.728237] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 796.728237] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 796.728237] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 796.728366] 
env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 796.728366] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 796.728366] env[61273]: DEBUG nova.virt.hardware [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 796.728957] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2a7ba5-3c2b-4978-855b-ba643f54c12d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.736818] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb39605c-a099-4ea0-a368-b37be3924f5a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.750682] env[61273]: ERROR nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c, please check neutron logs for more information. 
[ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Traceback (most recent call last): [ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] yield resources [ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self.driver.spawn(context, instance, image_meta, [ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] vm_ref = self.build_virtual_machine(instance, [ 796.750682] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] vif_infos = vmwarevif.get_vif_info(self._session, [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] for vif in network_info: [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] return self._sync_wrapper(fn, *args, **kwargs) [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self.wait() [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self[:] = self._gt.wait() [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] return self._exit_event.wait() [ 796.751002] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 796.751002] env[61273]: ERROR 
nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] current.throw(*self._exc) [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] result = function(*args, **kwargs) [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] return func(*args, **kwargs) [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] raise e [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] nwinfo = self.network_api.allocate_for_instance( [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] created_port_ids = self._update_ports_for_instance( [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] with excutils.save_and_reraise_exception(): [ 796.751416] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self.force_reraise() [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] raise self.value [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] updated_port = self._update_port( [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] _ensure_no_port_binding_failure(port) [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] raise exception.PortBindingFailed(port_id=port['id']) [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] nova.exception.PortBindingFailed: Binding failed for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c, please check neutron logs for more information. [ 796.751915] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] [ 796.751915] env[61273]: INFO nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Terminating instance [ 796.752984] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "refresh_cache-05901bd4-2bad-405e-8e73-f6de4393a0f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.753143] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquired lock "refresh_cache-05901bd4-2bad-405e-8e73-f6de4393a0f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.753305] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 796.753713] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 7a4838e62ce54565a831c5037cb74ae0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 796.768066] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a4838e62ce54565a831c5037cb74ae0 [ 797.092571] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 0783e401733e485ebc721acf56dec5a1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 797.103619] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0783e401733e485ebc721acf56dec5a1 [ 797.114748] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Lock "f82fd962-56c4-4cc2-b1dd-c51e7a71a9fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.942s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.115052] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f2786bfc-4058-4688-bc00-570d55c66988 tempest-ServerShowV254Test-1951211204 tempest-ServerShowV254Test-1951211204-project-member] Expecting reply to msg ebe21159976c4e6994c94fd80fa028c6 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 797.130678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebe21159976c4e6994c94fd80fa028c6 [ 797.272171] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 797.275956] env[61273]: DEBUG nova.compute.manager [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Received event network-changed-86a1da90-c2b6-4b02-b3e4-60cb6f775b2c {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 797.276168] env[61273]: DEBUG nova.compute.manager [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Refreshing instance network info cache due to event network-changed-86a1da90-c2b6-4b02-b3e4-60cb6f775b2c. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 797.276357] env[61273]: DEBUG oslo_concurrency.lockutils [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] Acquiring lock "refresh_cache-05901bd4-2bad-405e-8e73-f6de4393a0f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.328391] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.328903] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 504a1060b3a840abac9b29b97ed667d7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 797.336728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 504a1060b3a840abac9b29b97ed667d7 [ 797.831775] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Releasing lock "refresh_cache-05901bd4-2bad-405e-8e73-f6de4393a0f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.832244] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 797.832683] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 797.832737] env[61273]: DEBUG oslo_concurrency.lockutils [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] Acquired lock "refresh_cache-05901bd4-2bad-405e-8e73-f6de4393a0f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.832879] env[61273]: DEBUG nova.network.neutron [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Refreshing network info cache for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 797.833304] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] Expecting reply to msg 99c705b874b3466f816bfe47641cb3c9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 797.834113] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eebd4041-9a0c-4b9e-8bdb-adf0cd385355 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.845142] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74192642-1b4a-4bc8-8e60-c7d43ce9731f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.855822] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99c705b874b3466f816bfe47641cb3c9 [ 797.870822] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 05901bd4-2bad-405e-8e73-f6de4393a0f8 could not be found. [ 797.871122] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 797.871347] env[61273]: INFO nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 797.871633] env[61273]: DEBUG oslo.service.loopingcall [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.874662] env[61273]: DEBUG nova.compute.manager [-] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 797.875136] env[61273]: DEBUG nova.network.neutron [-] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 797.895245] env[61273]: DEBUG nova.network.neutron [-] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 797.895777] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 34d3316406fb4fcdace5798254ad671d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 797.903161] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34d3316406fb4fcdace5798254ad671d [ 798.018275] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882bbd27-2a29-45d7-b8a9-e1a30d41bbf5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.025983] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2dd7cd-2791-418f-920b-720095408a4c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.056804] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7510d38-e387-4917-bc9f-045bd88636da {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.064247] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302f2234-b6b9-4971-a3d4-5e0b3f4ee252 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.078987] env[61273]: DEBUG nova.compute.provider_tree [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.079533] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 6db49f39b3b247ce8e44486544546a2f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 798.087591] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6db49f39b3b247ce8e44486544546a2f [ 798.354466] env[61273]: DEBUG nova.network.neutron [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 798.398281] env[61273]: DEBUG nova.network.neutron [-] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.398807] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 17d0682c14b841018cc064ef265a04f9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 798.407290] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17d0682c14b841018cc064ef265a04f9 [ 798.432349] env[61273]: DEBUG nova.network.neutron [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.432927] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] Expecting reply to msg a8fea549b3834196955c76b2f27d9491 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 798.442098] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8fea549b3834196955c76b2f27d9491 [ 798.588612] env[61273]: DEBUG nova.scheduler.client.report [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 798.591027] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg de2578e9e26f48b18a44421de4cbc1f6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 798.610177] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de2578e9e26f48b18a44421de4cbc1f6 [ 798.900960] env[61273]: INFO nova.compute.manager [-] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Took 1.03 seconds to deallocate network for instance. 
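Annotation: the "Inventory has not changed ... based on inventory data" records above carry the provider's full inventory for 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb. As a minimal sketch (illustrative only, figures copied from the log), schedulable capacity per resource class follows the usual Placement convention capacity = (total - reserved) * allocation_ratio:

    # Illustrative only: derive schedulable capacity from the inventory
    # record logged for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400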
[ 798.903510] env[61273]: DEBUG nova.compute.claims [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 798.903688] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.935395] env[61273]: DEBUG oslo_concurrency.lockutils [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] Releasing lock "refresh_cache-05901bd4-2bad-405e-8e73-f6de4393a0f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.935691] env[61273]: DEBUG nova.compute.manager [req-bfdcc97e-4af0-41ca-8ff7-5a40f19958d9 req-d8bc26ed-1b53-4982-9b05-55e9dae52b46 service nova] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Received event network-vif-deleted-86a1da90-c2b6-4b02-b3e4-60cb6f775b2c {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 799.093332] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.093861] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 799.095582] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg c53b717b441f40c28a085aae8070c201 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 799.096693] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.482s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.098044] env[61273]: INFO nova.compute.claims [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.099464] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 98b93c9d1d2b4e159db45eb6f39c2c17 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 799.133973] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c53b717b441f40c28a085aae8070c201 [ 799.135856] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98b93c9d1d2b4e159db45eb6f39c2c17 [ 799.602242] env[61273]: DEBUG nova.compute.utils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 799.602961] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 27e26f7d85f94bff8829f342f652ea09 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 799.605874] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 054f9446793b4b1fb39f6dbcce39225c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 799.606133] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 799.606305] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 799.614206] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 054f9446793b4b1fb39f6dbcce39225c [ 799.614813] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27e26f7d85f94bff8829f342f652ea09 [ 799.666711] env[61273]: DEBUG nova.policy [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cb53d9709f844fe9b79a83c6fd96729', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3051fa87e7e647c5a3d7bdd0e4c63778', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 799.925009] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Successfully created port: 640780fc-ea34-42cf-b119-e6ca43151fad {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.107450] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 800.109174] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 6dcecda3c7fc4e9bb510dae516cad7aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 800.143107] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6dcecda3c7fc4e9bb510dae516cad7aa [ 800.398552] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46cf6575-062a-4c1d-a844-666c3f979690 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.407122] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52b6e60-a31a-4e2f-a78b-3a8452640cba {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.439964] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c408a0-c3ba-40e8-96cf-dd4627e232a8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.447941] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d8c0b0-e935-40c6-99d0-2b72c634be4f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.462238] env[61273]: DEBUG nova.compute.provider_tree [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.462776] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg de63b2a442bb4ef48a03702779bf53a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 800.472504] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de63b2a442bb4ef48a03702779bf53a4 [ 800.529739] env[61273]: DEBUG nova.compute.manager [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Received event network-changed-640780fc-ea34-42cf-b119-e6ca43151fad {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 800.529954] env[61273]: DEBUG nova.compute.manager [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Refreshing instance network info cache due to event network-changed-640780fc-ea34-42cf-b119-e6ca43151fad. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 800.530167] env[61273]: DEBUG oslo_concurrency.lockutils [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] Acquiring lock "refresh_cache-e8e826d4-2463-41a7-8c63-fd9f47eceea6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.530308] env[61273]: DEBUG oslo_concurrency.lockutils [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] Acquired lock "refresh_cache-e8e826d4-2463-41a7-8c63-fd9f47eceea6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.530476] env[61273]: DEBUG nova.network.neutron [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Refreshing network info cache for port 640780fc-ea34-42cf-b119-e6ca43151fad {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 800.530854] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] Expecting reply to msg 24ed2d4b6ee744f9b923456557cf4aef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 800.539630] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24ed2d4b6ee744f9b923456557cf4aef [ 800.617847] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 44925e682e604030b47c8805bdc0e32a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 800.652983] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44925e682e604030b47c8805bdc0e32a [ 800.711747] env[61273]: ERROR nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 640780fc-ea34-42cf-b119-e6ca43151fad, please check neutron logs for more information. 
[ 800.711747] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 800.711747] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 800.711747] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 800.711747] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 800.711747] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 800.711747] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 800.711747] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 800.711747] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.711747] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 800.711747] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.711747] env[61273]: ERROR nova.compute.manager raise self.value [ 800.711747] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 800.711747] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 800.711747] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.711747] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 800.712266] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 800.712266] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 800.712266] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 640780fc-ea34-42cf-b119-e6ca43151fad, please check neutron logs for more information. 
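Annotation: the innermost frame of the traceback above is nova.network.neutron._ensure_no_port_binding_failure. A simplified stand-in for that check (not the exact Nova code): the usual trigger is Neutron reporting the port's binding:vif_type as 'binding_failed', which Nova converts into PortBindingFailed.

    # Simplified stand-in for the check that raises PortBindingFailed above;
    # not the exact nova.network.neutron implementation.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # An unbindable port typically comes back with
        # binding:vif_type = 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure({
            'id': '640780fc-ea34-42cf-b119-e6ca43151fad',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)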
[ 800.712266] env[61273]: ERROR nova.compute.manager [ 800.712266] env[61273]: Traceback (most recent call last): [ 800.712266] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 800.712266] env[61273]: listener.cb(fileno) [ 800.712266] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 800.712266] env[61273]: result = function(*args, **kwargs) [ 800.712266] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 800.712266] env[61273]: return func(*args, **kwargs) [ 800.712266] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 800.712266] env[61273]: raise e [ 800.712266] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 800.712266] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 800.712266] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 800.712266] env[61273]: created_port_ids = self._update_ports_for_instance( [ 800.712266] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 800.712266] env[61273]: with excutils.save_and_reraise_exception(): [ 800.712266] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 800.712266] env[61273]: self.force_reraise() [ 800.712266] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 800.712266] env[61273]: raise self.value [ 800.712266] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 800.712266] env[61273]: updated_port = self._update_port( [ 800.712266] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 800.712266] env[61273]: _ensure_no_port_binding_failure(port) [ 800.712266] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 800.712266] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 800.713270] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 640780fc-ea34-42cf-b119-e6ca43151fad, please check neutron logs for more information. 
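Annotation: the force_reraise frames in both tracebacks come from oslo_utils.excutils.save_and_reraise_exception, which lets _update_ports_for_instance run cleanup while preserving and re-raising the original exception. A minimal usage sketch under assumed helper names (update_port and cleanup_ports are illustrative, not Nova's):

    # Usage sketch of oslo_utils.excutils.save_and_reraise_exception;
    # helper names below are illustrative, not Nova's actual ones.
    from oslo_utils import excutils

    def update_port(port_id):
        raise RuntimeError(f"binding failed for {port_id}")

    def cleanup_ports(created):
        print(f"rolling back {len(created)} created port(s)")

    def update_ports_for_instance(port_ids):
        created = []
        try:
            for port_id in port_ids:
                created.append(update_port(port_id))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs before the saved exception is re-raised on __exit__
                # (the force_reraise frame seen in the log).
                cleanup_ports(created)

    try:
        update_ports_for_instance(['640780fc-ea34-42cf-b119-e6ca43151fad'])
    except RuntimeError as exc:
        print('re-raised:', exc)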
[ 800.713270] env[61273]: Removing descriptor: 15 [ 800.966099] env[61273]: DEBUG nova.scheduler.client.report [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 800.968753] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 8a1c98492167470090b7eedb093a72dd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 800.986200] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a1c98492167470090b7eedb093a72dd [ 801.050276] env[61273]: DEBUG nova.network.neutron [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 801.111160] env[61273]: DEBUG nova.network.neutron [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.111833] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] Expecting reply to msg 9afd7e182b3441d98d101892dc48a0aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 801.121436] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 801.124629] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9afd7e182b3441d98d101892dc48a0aa [ 801.154237] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 801.154599] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 801.154850] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 801.155145] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 801.155380] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 801.155609] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 801.155930] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 801.156206] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 801.156474] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 801.156732] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 801.157008] env[61273]: DEBUG nova.virt.hardware [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 801.158250] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80f81d5-ab0d-4e9f-aaef-07bc9b4defd6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.168301] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5b5589-2a59-45be-86ca-c9a979faecbc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.185586] env[61273]: ERROR nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 640780fc-ea34-42cf-b119-e6ca43151fad, please check neutron logs for more information. 
[ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Traceback (most recent call last): [ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] yield resources [ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self.driver.spawn(context, instance, image_meta, [ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] vm_ref = self.build_virtual_machine(instance, [ 801.185586] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] vif_infos = vmwarevif.get_vif_info(self._session, [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] for vif in network_info: [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] return self._sync_wrapper(fn, *args, **kwargs) [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self.wait() [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self[:] = self._gt.wait() [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] return self._exit_event.wait() [ 801.186017] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 801.186017] env[61273]: ERROR 
nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] current.throw(*self._exc) [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] result = function(*args, **kwargs) [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] return func(*args, **kwargs) [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] raise e [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] nwinfo = self.network_api.allocate_for_instance( [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] created_port_ids = self._update_ports_for_instance( [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] with excutils.save_and_reraise_exception(): [ 801.186391] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self.force_reraise() [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] raise self.value [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] updated_port = self._update_port( [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] _ensure_no_port_binding_failure(port) [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] raise exception.PortBindingFailed(port_id=port['id']) [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] nova.exception.PortBindingFailed: Binding failed for port 640780fc-ea34-42cf-b119-e6ca43151fad, please check neutron logs for more information. [ 801.186763] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] [ 801.186763] env[61273]: INFO nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Terminating instance [ 801.188786] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "refresh_cache-e8e826d4-2463-41a7-8c63-fd9f47eceea6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.472018] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.472621] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 801.474415] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 705be25dbe4e4d0b8036d45a99ba769f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 801.475506] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.167s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.475958] env[61273]: DEBUG nova.objects.instance [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lazy-loading 'resources' on Instance uuid ca8a38c7-a81c-407a-9558-3d15e492d9fa {{(pid=61273) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 801.476037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 595c01d2a12241baaaf02af676291da8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 801.493393] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 595c01d2a12241baaaf02af676291da8 [ 801.507631] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 705be25dbe4e4d0b8036d45a99ba769f [ 801.614659] env[61273]: DEBUG oslo_concurrency.lockutils [req-c8275e27-535c-4e8c-95d4-5c1196deb99e req-34d865d1-a0dd-4d74-af6f-572e8c8f6f64 service nova] Releasing lock "refresh_cache-e8e826d4-2463-41a7-8c63-fd9f47eceea6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.615178] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquired lock "refresh_cache-e8e826d4-2463-41a7-8c63-fd9f47eceea6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.615445] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 801.615948] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 51ebf7f5e5b145ebb152d77f33a267b3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 801.623609] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51ebf7f5e5b145ebb152d77f33a267b3 [ 801.978891] env[61273]: DEBUG nova.compute.utils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Using /dev/sd instead of None 
{{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.979544] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 8fb0be06aa25422fbc9e8b8585f660e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 801.981124] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 801.981124] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 801.997158] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fb0be06aa25422fbc9e8b8585f660e6 [ 802.020478] env[61273]: DEBUG nova.policy [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '239c4c941bf844c5b3c1883246738431', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9d6bc18a1814fbb8d52af8d48eb4665', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 802.132753] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.179848] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.180458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg a64a763d4abf4d2785fec026d3b286c7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 802.188375] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a64a763d4abf4d2785fec026d3b286c7 [ 802.264307] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce86cdd-a247-4c8c-b61c-1fd3652fde5b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.272165] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c74d78e-6a9f-4385-834d-f47903d2f075 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.302580] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Successfully created port: 34cf5a99-d31e-4c7d-aa80-312081d0f6cf {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 802.305323] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76edd02-5898-4144-a70b-770a071fd30d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.312262] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac54cb8-fdba-4c01-b43d-e01865fa64bf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.324730] env[61273]: DEBUG nova.compute.provider_tree [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.325206] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 4d1c415c7c08438eb7c2abb6adda5317 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 802.333702] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d1c415c7c08438eb7c2abb6adda5317 [ 802.484348] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Start building block device 
mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 802.486281] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 6fd86c2f41b741cc9867fa15b095db9f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 802.522378] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fd86c2f41b741cc9867fa15b095db9f [ 802.575418] env[61273]: DEBUG nova.compute.manager [req-661b404a-60a3-4c53-bf2f-9b0c5b3a15f4 req-eb975db9-9eda-4da2-b383-c7a550ac7ac4 service nova] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Received event network-vif-deleted-640780fc-ea34-42cf-b119-e6ca43151fad {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 802.683205] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Releasing lock "refresh_cache-e8e826d4-2463-41a7-8c63-fd9f47eceea6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.683641] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 802.683835] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 802.684163] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5bf4114-01e7-4bd0-901a-298abffc37d3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.693043] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9261cd5d-23dd-4dc0-a4f6-bb0dfa795831 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.712874] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e8e826d4-2463-41a7-8c63-fd9f47eceea6 could not be found. 
[ 802.713085] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 802.713264] env[61273]: INFO nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Took 0.03 seconds to destroy the instance on the hypervisor. [ 802.713502] env[61273]: DEBUG oslo.service.loopingcall [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.713692] env[61273]: DEBUG nova.compute.manager [-] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 802.713789] env[61273]: DEBUG nova.network.neutron [-] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 802.727861] env[61273]: DEBUG nova.network.neutron [-] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.728362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c5647aef61cf4b17ab233e6bf878eb5d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 802.735837] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5647aef61cf4b17ab233e6bf878eb5d [ 802.828231] env[61273]: DEBUG nova.scheduler.client.report [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 802.830657] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 350dd3ef67684533ac9dff195fe40a94 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 802.846036] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 350dd3ef67684533ac9dff195fe40a94 [ 802.990933] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 365f6c8a41aa4e06b6b634bf11ed46a0 in 
queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 803.022836] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 365f6c8a41aa4e06b6b634bf11ed46a0 [ 803.060497] env[61273]: ERROR nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf, please check neutron logs for more information. [ 803.060497] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 803.060497] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.060497] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 803.060497] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.060497] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 803.060497] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.060497] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 803.060497] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.060497] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 803.060497] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.060497] env[61273]: ERROR nova.compute.manager raise self.value [ 803.060497] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.060497] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 803.060497] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.060497] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 803.060931] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.060931] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 803.060931] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf, please check neutron logs for more information. 
[ 803.060931] env[61273]: ERROR nova.compute.manager [ 803.060931] env[61273]: Traceback (most recent call last): [ 803.060931] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 803.060931] env[61273]: listener.cb(fileno) [ 803.060931] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.060931] env[61273]: result = function(*args, **kwargs) [ 803.060931] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.060931] env[61273]: return func(*args, **kwargs) [ 803.060931] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.060931] env[61273]: raise e [ 803.060931] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.060931] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 803.060931] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.060931] env[61273]: created_port_ids = self._update_ports_for_instance( [ 803.060931] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.060931] env[61273]: with excutils.save_and_reraise_exception(): [ 803.060931] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.060931] env[61273]: self.force_reraise() [ 803.060931] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.060931] env[61273]: raise self.value [ 803.060931] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.060931] env[61273]: updated_port = self._update_port( [ 803.060931] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.060931] env[61273]: _ensure_no_port_binding_failure(port) [ 803.060931] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.060931] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 803.061790] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf, please check neutron logs for more information. 
[ 803.061790] env[61273]: Removing descriptor: 15 [ 803.231700] env[61273]: DEBUG nova.network.neutron [-] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.231700] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e0819d9f99034be3837d2ff8bb6fb1e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 803.239406] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0819d9f99034be3837d2ff8bb6fb1e6 [ 803.333303] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.858s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.336265] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.336s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.339197] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 4d57a441c19b423b8989e81e43ed9197 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 803.356021] env[61273]: INFO nova.scheduler.client.report [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Deleted allocations for instance ca8a38c7-a81c-407a-9558-3d15e492d9fa [ 803.358603] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg de2c3fdc8f2f4eabac2c57a96380f921 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 803.392373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d57a441c19b423b8989e81e43ed9197 [ 803.401865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de2c3fdc8f2f4eabac2c57a96380f921 [ 803.495682] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 803.518326] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 803.518808] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 803.519121] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 803.519433] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 803.519731] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 803.520016] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 803.520343] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 803.520634] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 803.520917] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 803.521195] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 803.521490] env[61273]: DEBUG nova.virt.hardware [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 803.522572] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bff31a-de43-4158-b4e5-6cb96f520d2e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.531596] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75836885-66ce-48ed-a63c-e61490400364 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.549101] env[61273]: ERROR nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf, please check neutron logs for more information. 
[ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Traceback (most recent call last): [ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] yield resources [ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self.driver.spawn(context, instance, image_meta, [ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] vm_ref = self.build_virtual_machine(instance, [ 803.549101] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] for vif in network_info: [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] return self._sync_wrapper(fn, *args, **kwargs) [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self.wait() [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self[:] = self._gt.wait() [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] return self._exit_event.wait() [ 803.549514] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 803.549514] env[61273]: ERROR 
nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] current.throw(*self._exc) [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] result = function(*args, **kwargs) [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] return func(*args, **kwargs) [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] raise e [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] nwinfo = self.network_api.allocate_for_instance( [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] created_port_ids = self._update_ports_for_instance( [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] with excutils.save_and_reraise_exception(): [ 803.549981] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self.force_reraise() [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] raise self.value [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] updated_port = self._update_port( [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] _ensure_no_port_binding_failure(port) [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] raise exception.PortBindingFailed(port_id=port['id']) [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] nova.exception.PortBindingFailed: Binding failed for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf, please check neutron logs for more information. [ 803.550359] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] [ 803.551331] env[61273]: INFO nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Terminating instance [ 803.553937] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Acquiring lock "refresh_cache-f6faf064-364d-4d24-9822-220bce47b3f0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.554272] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Acquired lock "refresh_cache-f6faf064-364d-4d24-9822-220bce47b3f0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.554575] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.555104] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 7d7cb6b8158f498c993d501ddd9ee4cf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 803.563167] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d7cb6b8158f498c993d501ddd9ee4cf [ 803.733254] env[61273]: INFO nova.compute.manager [-] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Took 1.02 seconds to deallocate network for instance. 
[ 803.735766] env[61273]: DEBUG nova.compute.claims [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 803.735947] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.863207] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Lock "ca8a38c7-a81c-407a-9558-3d15e492d9fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.252s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.863547] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e0c40bbe-42f3-4daf-8cad-399815609a62 tempest-ServerShowV247Test-937081600 tempest-ServerShowV247Test-937081600-project-member] Expecting reply to msg 58837509587f46458987a3a9bc919928 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 803.884282] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58837509587f46458987a3a9bc919928 [ 804.076188] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.111990] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b961e2b5-a344-4802-a371-12b15fd457fd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.120323] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f190f5fd-055d-4127-9f9e-eab0ea0f639b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.155575] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c07eed-2e32-4aff-825b-332e0e4d4bbe {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.168250] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf08ecf-65fd-431a-bbda-15270d8f25ed {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.178630] env[61273]: DEBUG nova.compute.provider_tree [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.178630] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg ed31025a9575410da3a25ae875a74d8e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 804.187530] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.187530] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 5149ba089b444711b708c5f22e4c3cee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 804.191341] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed31025a9575410da3a25ae875a74d8e [ 804.192552] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5149ba089b444711b708c5f22e4c3cee [ 804.619322] env[61273]: DEBUG nova.compute.manager [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Received event network-changed-34cf5a99-d31e-4c7d-aa80-312081d0f6cf {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 804.619322] env[61273]: DEBUG nova.compute.manager [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Refreshing instance network info cache due to event 
network-changed-34cf5a99-d31e-4c7d-aa80-312081d0f6cf. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 804.619322] env[61273]: DEBUG oslo_concurrency.lockutils [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] Acquiring lock "refresh_cache-f6faf064-364d-4d24-9822-220bce47b3f0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.682166] env[61273]: DEBUG nova.scheduler.client.report [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 804.684833] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 9cbead80553242da8880d83ab1bbde2c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 804.691523] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Releasing lock "refresh_cache-f6faf064-364d-4d24-9822-220bce47b3f0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.691976] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 804.692209] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 804.692929] env[61273]: DEBUG oslo_concurrency.lockutils [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] Acquired lock "refresh_cache-f6faf064-364d-4d24-9822-220bce47b3f0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.692929] env[61273]: DEBUG nova.network.neutron [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Refreshing network info cache for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 804.693120] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] Expecting reply to msg 68ddabf4404c4c18a905615afa19a0cd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 804.693787] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b995af4a-a4a6-4f0a-8faa-8d902ac626b5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.696626] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cbead80553242da8880d83ab1bbde2c [ 804.700248] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68ddabf4404c4c18a905615afa19a0cd [ 804.703134] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25190a71-ba61-4e4b-b988-9d25ae491cb0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.724417] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f6faf064-364d-4d24-9822-220bce47b3f0 could not be found. [ 804.724664] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 804.724850] env[61273]: INFO nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 804.725091] env[61273]: DEBUG oslo.service.loopingcall [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.725306] env[61273]: DEBUG nova.compute.manager [-] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 804.725401] env[61273]: DEBUG nova.network.neutron [-] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.751304] env[61273]: DEBUG nova.network.neutron [-] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.751850] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fbb362e7a87f4f799c271af88cf9b050 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 804.760164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbb362e7a87f4f799c271af88cf9b050 [ 805.187930] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.852s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.188622] env[61273]: ERROR nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bba752d6-1653-445d-af20-3f45b1d50fbc, please check neutron logs for more information. 
[ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Traceback (most recent call last): [ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self.driver.spawn(context, instance, image_meta, [ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self._vmops.spawn(context, instance, image_meta, injected_files, [ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] vm_ref = self.build_virtual_machine(instance, [ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] vif_infos = vmwarevif.get_vif_info(self._session, [ 805.188622] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] for vif in network_info: [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] return self._sync_wrapper(fn, *args, **kwargs) [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self.wait() [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self[:] = self._gt.wait() [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] return self._exit_event.wait() [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] current.throw(*self._exc) [ 805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
805.188971] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] result = function(*args, **kwargs) [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] return func(*args, **kwargs) [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] raise e [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] nwinfo = self.network_api.allocate_for_instance( [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] created_port_ids = self._update_ports_for_instance( [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] with excutils.save_and_reraise_exception(): [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] self.force_reraise() [ 805.189380] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.189749] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] raise self.value [ 805.189749] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 805.189749] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] updated_port = self._update_port( [ 805.189749] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 805.189749] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] _ensure_no_port_binding_failure(port) [ 805.189749] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 805.189749] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] raise exception.PortBindingFailed(port_id=port['id']) [ 805.189749] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] nova.exception.PortBindingFailed: Binding failed for 
port bba752d6-1653-445d-af20-3f45b1d50fbc, please check neutron logs for more information. [ 805.189749] env[61273]: ERROR nova.compute.manager [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] [ 805.189749] env[61273]: DEBUG nova.compute.utils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Binding failed for port bba752d6-1653-445d-af20-3f45b1d50fbc, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 805.190654] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.072s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.192482] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg f443bb9cb3b64bdab5f2a3ae43852b30 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 805.193732] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Build of instance e62c0b97-cfa7-4acf-bdc5-93d6996c7806 was re-scheduled: Binding failed for port bba752d6-1653-445d-af20-3f45b1d50fbc, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 805.194149] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 805.194365] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Acquiring lock "refresh_cache-e62c0b97-cfa7-4acf-bdc5-93d6996c7806" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.194508] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Acquired lock "refresh_cache-e62c0b97-cfa7-4acf-bdc5-93d6996c7806" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.194659] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 805.195017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg f14c9e3fd84c42d89cd5674a988de196 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 805.206035] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f14c9e3fd84c42d89cd5674a988de196 [ 805.212413] env[61273]: DEBUG nova.network.neutron [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.242514] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f443bb9cb3b64bdab5f2a3ae43852b30 [ 805.254336] env[61273]: DEBUG nova.network.neutron [-] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.254892] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f58558b01f014f63a6e8f5035aea04a3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 805.263115] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f58558b01f014f63a6e8f5035aea04a3 [ 805.306344] env[61273]: DEBUG nova.network.neutron [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.306847] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] Expecting reply to msg fcaf538b064d41799c2f2d4d9e96144e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 805.314624] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcaf538b064d41799c2f2d4d9e96144e [ 805.719887] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.757765] env[61273]: INFO nova.compute.manager [-] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Took 1.03 seconds to deallocate network for instance. 
[ 805.760203] env[61273]: DEBUG nova.compute.claims [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 805.760381] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.808347] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.808865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 9c19192ced42402384e8f3443190cf01 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 805.809824] env[61273]: DEBUG oslo_concurrency.lockutils [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] Releasing lock "refresh_cache-f6faf064-364d-4d24-9822-220bce47b3f0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.810095] env[61273]: DEBUG nova.compute.manager [req-b66cedbe-464f-42e4-8f5a-ab0bc8833723 req-48718c71-359c-493b-8aca-f6755677ad4d service nova] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Received event network-vif-deleted-34cf5a99-d31e-4c7d-aa80-312081d0f6cf {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 805.817065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c19192ced42402384e8f3443190cf01 [ 805.963558] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f638d4a9-ba66-4926-8253-b52da37a557a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.971566] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e9cddf-1917-477a-9fd8-c1d9bc0c6f8c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.002190] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe46a375-9897-4314-b107-20fffa523058 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.009386] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b125c1-8ca5-4898-9eb1-73b2594a7e3a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.022532] env[61273]: DEBUG nova.compute.provider_tree [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 
tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.023092] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg ac84976a0f934cafb55fd59b5019aa38 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 806.029907] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac84976a0f934cafb55fd59b5019aa38 [ 806.311184] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Releasing lock "refresh_cache-e62c0b97-cfa7-4acf-bdc5-93d6996c7806" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.311461] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 806.311711] env[61273]: DEBUG nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 806.311914] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 806.326845] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 806.327409] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 8614bef2c1004381b6bef716f72fdf5b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 806.333970] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8614bef2c1004381b6bef716f72fdf5b [ 806.526050] env[61273]: DEBUG nova.scheduler.client.report [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 806.528519] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg bd20e1f8eef948fb99c718807fe34f3a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 806.538960] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd20e1f8eef948fb99c718807fe34f3a [ 806.829553] env[61273]: DEBUG nova.network.neutron [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.830233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg b3f8378b487b47c4a32f29350f4832cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 806.838717] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3f8378b487b47c4a32f29350f4832cc [ 807.030976] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.840s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.031754] env[61273]: ERROR nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2, please check neutron logs for more information. 
[ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Traceback (most recent call last): [ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self.driver.spawn(context, instance, image_meta, [ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] vm_ref = self.build_virtual_machine(instance, [ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] vif_infos = vmwarevif.get_vif_info(self._session, [ 807.031754] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] for vif in network_info: [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] return self._sync_wrapper(fn, *args, **kwargs) [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self.wait() [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self[:] = self._gt.wait() [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] return self._exit_event.wait() [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] result = hub.switch() [ 807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
807.032075] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] return self.greenlet.switch() [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] result = function(*args, **kwargs) [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] return func(*args, **kwargs) [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] raise e [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] nwinfo = self.network_api.allocate_for_instance( [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] created_port_ids = self._update_ports_for_instance( [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] with excutils.save_and_reraise_exception(): [ 807.032383] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] self.force_reraise() [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] raise self.value [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] updated_port = self._update_port( [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] _ensure_no_port_binding_failure(port) [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] raise exception.PortBindingFailed(port_id=port['id']) [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] nova.exception.PortBindingFailed: Binding failed for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2, please check neutron logs for more information. [ 807.032795] env[61273]: ERROR nova.compute.manager [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] [ 807.033054] env[61273]: DEBUG nova.compute.utils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Binding failed for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 807.034309] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.951s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.035486] env[61273]: INFO nova.compute.claims [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.037039] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 375b6e29cd3f4b4aa82715d723b6dd5d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 807.038399] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Build of instance 7bfdc548-4f10-4525-9ea1-3781f90ca81d was re-scheduled: Binding failed for port e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 807.038815] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 807.039029] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "refresh_cache-7bfdc548-4f10-4525-9ea1-3781f90ca81d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.039169] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquired lock "refresh_cache-7bfdc548-4f10-4525-9ea1-3781f90ca81d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.039321] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 807.039716] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 1dd0c2345fd84a85b36a1d91232e736c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 807.046063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dd0c2345fd84a85b36a1d91232e736c [ 807.089561] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 375b6e29cd3f4b4aa82715d723b6dd5d [ 807.332367] env[61273]: INFO nova.compute.manager [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] [instance: e62c0b97-cfa7-4acf-bdc5-93d6996c7806] Took 1.02 seconds to deallocate network for instance. 
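The traceback above (timestamp 807.031754) bottoms out in _ensure_no_port_binding_failure in nova/network/neutron.py, which converts a Neutron port whose binding failed into nova.exception.PortBindingFailed; that exception is what drives the "was re-scheduled" path logged at 807.038399. Below is a self-contained sketch of that check, runnable outside a Nova tree: the stand-in exception class and the 'binding:vif_type' == 'binding_failed' comparison are assumptions added for illustration, while the raise with port_id=port['id'] is verbatim from the traceback.

class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed so the sketch runs standalone.
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Assumed detection rule: Neutron marks a failed binding by setting the
    # port's binding:vif_type to the sentinel value 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        # Matches the raise shown at nova/network/neutron.py line 294 above.
        raise PortBindingFailed(port_id=port['id'])


# Example with the port id from the traceback above:
try:
    ensure_no_port_binding_failure(
        {'id': 'e8d4b3f1-4266-42ad-a1e2-e0967aa5c4c2',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)
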
[ 807.334486] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg d6d1845bba7645098b355fd818d44f10 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 807.366682] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6d1845bba7645098b355fd818d44f10 [ 807.542579] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 29d78f346367450182ee547678e8de5e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 807.551362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29d78f346367450182ee547678e8de5e [ 807.558600] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.624814] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.625309] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg e571822343b14492a55c93b403fcca7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 807.633460] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e571822343b14492a55c93b403fcca7a [ 807.838943] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 5112c3b809c8426991efc04fbe49cd6e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 807.870865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5112c3b809c8426991efc04fbe49cd6e [ 808.126850] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Releasing lock "refresh_cache-7bfdc548-4f10-4525-9ea1-3781f90ca81d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.127120] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 808.127301] env[61273]: DEBUG nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 808.127469] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 808.143673] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 808.144407] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 05280f203ce64ff5b939e019f59172e2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 808.151778] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05280f203ce64ff5b939e019f59172e2 [ 808.292274] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e3a0e3-b926-4b99-b3b7-09f45da71dca {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.300057] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681d7268-707a-4966-81dd-5488cf88b4e9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.331814] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca83153-2dd3-45de-ab20-378547b75d10 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.339061] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b762d3-4c57-42e6-ac5b-4c764f063df0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.356341] env[61273]: DEBUG nova.compute.provider_tree [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.356996] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 46a0683a720d482ab8d3e60e60a68fd3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 808.364778] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46a0683a720d482ab8d3e60e60a68fd3 [ 808.367680] 
env[61273]: INFO nova.scheduler.client.report [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Deleted allocations for instance e62c0b97-cfa7-4acf-bdc5-93d6996c7806 [ 808.374374] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Expecting reply to msg 4ef85e2fcb8943ae9e85085192ead5e4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 808.391706] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ef85e2fcb8943ae9e85085192ead5e4 [ 808.646842] env[61273]: DEBUG nova.network.neutron [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.647373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 5466f677e0b8490284a96d6c4cec18e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 808.657063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5466f677e0b8490284a96d6c4cec18e3 [ 808.859880] env[61273]: DEBUG nova.scheduler.client.report [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 808.862607] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 458889e85a5d44f2bfd37121b7586da5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 808.875154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 458889e85a5d44f2bfd37121b7586da5 [ 808.875770] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f5fd2d51-640b-4554-9c67-19f076a954a4 tempest-AttachInterfacesUnderV243Test-1945262616 tempest-AttachInterfacesUnderV243Test-1945262616-project-member] Lock "e62c0b97-cfa7-4acf-bdc5-93d6996c7806" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 124.781s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.876423] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 
a19c8982f5b74d4ca58e940da7c8326b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 808.885271] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a19c8982f5b74d4ca58e940da7c8326b [ 809.149754] env[61273]: INFO nova.compute.manager [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 7bfdc548-4f10-4525-9ea1-3781f90ca81d] Took 1.02 seconds to deallocate network for instance. [ 809.151301] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg adf0adcda61340a9a7472df4efef1122 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 809.183182] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adf0adcda61340a9a7472df4efef1122 [ 809.365825] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.366371] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 809.368065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg a610f71fb8b04b38a1a1805474e47b05 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 809.369135] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.352s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.370546] env[61273]: INFO nova.compute.claims [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 809.372231] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 31c455d6770240acb71149d103c6a50d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 809.378076] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 809.379658] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 97d73fe472024eb5b41fa24f705488fa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 809.412554] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31c455d6770240acb71149d103c6a50d [ 809.420262] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97d73fe472024eb5b41fa24f705488fa [ 809.424804] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a610f71fb8b04b38a1a1805474e47b05 [ 809.656145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg bfffecccf62048d1bbee643732761f65 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 809.688076] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfffecccf62048d1bbee643732761f65 [ 809.875725] env[61273]: DEBUG nova.compute.utils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 809.876424] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg dba36074fbfd4e38874ca54f359d2dd1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 809.877598] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 809.877854] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 809.881016] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 2e9e31e5b6204a97b46080439f60b4b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 809.887657] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dba36074fbfd4e38874ca54f359d2dd1 [ 809.912928] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e9e31e5b6204a97b46080439f60b4b0 [ 809.972628] env[61273]: DEBUG nova.policy [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e10b86ebedd4040ad77e02f68c20392', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14ea1bf75c4d449fb4d3a1d1d39d2777', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 809.980308] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.192217] env[61273]: INFO nova.scheduler.client.report [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Deleted allocations for instance 7bfdc548-4f10-4525-9ea1-3781f90ca81d [ 810.200824] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg b59dfd7811604bcab202b1ea4a79c979 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 810.214040] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b59dfd7811604bcab202b1ea4a79c979 [ 810.374810] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Successfully created port: 56e44824-11a2-4db8-8e08-d6d6b0ecf51d {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 810.382981] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: 
d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 810.384718] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 4f2154424e524bac9e1bdeaf47712a1e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 810.422220] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f2154424e524bac9e1bdeaf47712a1e [ 810.696415] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6343cb55-4971-48f8-a88c-92e6fa2e356e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.704044] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9249982-bf5d-45e3-9d8f-4571cb11aae0 tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "7bfdc548-4f10-4525-9ea1-3781f90ca81d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.119s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.704798] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 215722893f074bad8597cb0bf3342953 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 810.706352] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60433be3-2fd3-4bbc-8ba3-a0341e4b15ec {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.739575] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 215722893f074bad8597cb0bf3342953 [ 810.740644] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36d27e0-7d37-452f-b687-35b553d0f81e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.749213] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2c581f-ee85-41f7-81a7-0e63ac3f6f0f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.764396] env[61273]: DEBUG nova.compute.provider_tree [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.765010] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 9583c9f3b9a54d5191bd46b97a712eea in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 810.774999] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9583c9f3b9a54d5191bd46b97a712eea [ 810.891687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg c4bd3a6832a94836ad8ee1f18e3bff51 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 810.938178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4bd3a6832a94836ad8ee1f18e3bff51 [ 811.215017] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 811.215017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 804877d118b941e7a52b5a785e9a7666 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 811.251586] env[61273]: DEBUG nova.compute.manager [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Received event network-changed-56e44824-11a2-4db8-8e08-d6d6b0ecf51d {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 811.251586] env[61273]: DEBUG nova.compute.manager [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Refreshing instance network info cache due to event network-changed-56e44824-11a2-4db8-8e08-d6d6b0ecf51d. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 811.251586] env[61273]: DEBUG oslo_concurrency.lockutils [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] Acquiring lock "refresh_cache-d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.251586] env[61273]: DEBUG oslo_concurrency.lockutils [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] Acquired lock "refresh_cache-d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.251586] env[61273]: DEBUG nova.network.neutron [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Refreshing network info cache for port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 811.252044] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] Expecting reply to msg 2eff49a7ce2441d2a40d5a20dbab0f8c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 811.252761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 804877d118b941e7a52b5a785e9a7666 [ 811.260174] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2eff49a7ce2441d2a40d5a20dbab0f8c [ 811.269822] env[61273]: DEBUG nova.scheduler.client.report [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Inventory has not changed 
for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 811.269983] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 0efc764c8b5444f69e59b15fe249ce66 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 811.282528] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0efc764c8b5444f69e59b15fe249ce66 [ 811.360232] env[61273]: ERROR nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d, please check neutron logs for more information. [ 811.360232] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 811.360232] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.360232] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 811.360232] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 811.360232] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 811.360232] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 811.360232] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 811.360232] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.360232] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 811.360232] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.360232] env[61273]: ERROR nova.compute.manager raise self.value [ 811.360232] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 811.360232] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 811.360232] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.360232] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 811.360671] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 811.360671] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 811.360671] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d, please check neutron logs for more information. 
[ 811.360671] env[61273]: ERROR nova.compute.manager [ 811.360671] env[61273]: Traceback (most recent call last): [ 811.360671] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 811.360671] env[61273]: listener.cb(fileno) [ 811.360671] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 811.360671] env[61273]: result = function(*args, **kwargs) [ 811.360671] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 811.360671] env[61273]: return func(*args, **kwargs) [ 811.360671] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 811.360671] env[61273]: raise e [ 811.360671] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.360671] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 811.360671] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 811.360671] env[61273]: created_port_ids = self._update_ports_for_instance( [ 811.360671] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 811.360671] env[61273]: with excutils.save_and_reraise_exception(): [ 811.360671] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.360671] env[61273]: self.force_reraise() [ 811.360671] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.360671] env[61273]: raise self.value [ 811.360671] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 811.360671] env[61273]: updated_port = self._update_port( [ 811.360671] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.360671] env[61273]: _ensure_no_port_binding_failure(port) [ 811.360671] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 811.360671] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 811.361427] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d, please check neutron logs for more information. [ 811.361427] env[61273]: Removing descriptor: 15 [ 811.395320] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 811.431422] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 811.431707] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 811.432312] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 811.432531] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 811.432689] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 811.432840] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 811.433589] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 811.433771] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 811.433947] 
env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 811.434113] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 811.434287] env[61273]: DEBUG nova.virt.hardware [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 811.435140] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7daf9396-da9f-43eb-a84c-aab7c74add21 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.446765] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3089c84-7a99-4ced-959f-b65b98e53002 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.462087] env[61273]: ERROR nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d, please check neutron logs for more information. 
[ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Traceback (most recent call last): [ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] yield resources [ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self.driver.spawn(context, instance, image_meta, [ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] vm_ref = self.build_virtual_machine(instance, [ 811.462087] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] vif_infos = vmwarevif.get_vif_info(self._session, [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] for vif in network_info: [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] return self._sync_wrapper(fn, *args, **kwargs) [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self.wait() [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self[:] = self._gt.wait() [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] return self._exit_event.wait() [ 811.462676] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 811.462676] env[61273]: ERROR 
nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] current.throw(*self._exc) [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] result = function(*args, **kwargs) [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] return func(*args, **kwargs) [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] raise e [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] nwinfo = self.network_api.allocate_for_instance( [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] created_port_ids = self._update_ports_for_instance( [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] with excutils.save_and_reraise_exception(): [ 811.463300] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self.force_reraise() [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] raise self.value [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] updated_port = self._update_port( [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] _ensure_no_port_binding_failure(port) [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] raise exception.PortBindingFailed(port_id=port['id']) [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] nova.exception.PortBindingFailed: Binding failed for port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d, please check neutron logs for more information. [ 811.463844] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] [ 811.463844] env[61273]: INFO nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Terminating instance [ 811.465251] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Acquiring lock "refresh_cache-d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.736546] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.776307] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.776864] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 811.782054] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 9564a0deeaee4159a0197d4d17d4f2ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 811.791723] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.833s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.793282] env[61273]: INFO nova.compute.claims [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.795007] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg fad7f485d8264b8a8f6b4a26c9e0853b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 811.813353] env[61273]: DEBUG nova.network.neutron [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.830177] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9564a0deeaee4159a0197d4d17d4f2ba [ 811.866289] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fad7f485d8264b8a8f6b4a26c9e0853b [ 812.103111] env[61273]: DEBUG nova.network.neutron [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.103635] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] Expecting reply to msg 81ff61edbbfd4d149d391b33999321f7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 812.111784] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81ff61edbbfd4d149d391b33999321f7 [ 812.293881] env[61273]: DEBUG nova.compute.utils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 812.294530] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 4d10ce2da9934588954ae7e31b4247f5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 812.295525] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 
tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 812.295695] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 812.300215] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 8bcbe1dbe96f4030a1e76e32df6ecd37 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 812.307044] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d10ce2da9934588954ae7e31b4247f5 [ 812.312767] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bcbe1dbe96f4030a1e76e32df6ecd37 [ 812.371312] env[61273]: DEBUG nova.policy [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '438a167fe61344bc9371e3b42d5344c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6ac2fa4041e4f24bee18db89143d7dc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 812.606259] env[61273]: DEBUG oslo_concurrency.lockutils [req-eb6cd9bb-70b0-46ee-a304-e83678586d7d req-a2949d04-c39b-4170-a2f8-575c9b176e9c service nova] Releasing lock "refresh_cache-d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.606751] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Acquired lock "refresh_cache-d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.606946] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 812.607393] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 729813c849b1464585818448c47e08be in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 812.618134] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 729813c849b1464585818448c47e08be [ 812.727906] env[61273]: DEBUG 
nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Successfully created port: 33201922-1943-4bb7-92f4-71b9565e8b26 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.799345] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 812.801723] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 9764419feef64ebcaa5b62dcae201825 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 812.846867] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9764419feef64ebcaa5b62dcae201825 [ 813.076530] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a66d6e-6c12-4a98-88c8-676f98b9cf6f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.085433] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f91bc0e-ffc7-471d-9f32-37c295f2e83d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.121718] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a5dfeb-9f23-4e33-8d53-d4b4e33642dd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.129317] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4321e2e8-4f5c-410c-8fa1-43a0b2694b51 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.145364] env[61273]: DEBUG nova.compute.provider_tree [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.146017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg f183f98d5d9d423b8153e69e4c288eda in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 813.153221] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f183f98d5d9d423b8153e69e4c288eda [ 813.309992] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 07ba30690c1f4864a02eaeb23dd73ae7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 813.336838] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c 
tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.356834] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07ba30690c1f4864a02eaeb23dd73ae7 [ 813.388089] env[61273]: DEBUG nova.compute.manager [req-222b9d02-b00e-4b92-a1d3-bb10640b32c7 req-14a5c128-8073-4579-890e-c2618ad07560 service nova] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Received event network-vif-deleted-56e44824-11a2-4db8-8e08-d6d6b0ecf51d {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 813.425100] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.425628] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg c354123f1fd240aeab98729ed6e1202a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 813.434856] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c354123f1fd240aeab98729ed6e1202a [ 813.648609] env[61273]: DEBUG nova.scheduler.client.report [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 813.650986] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 2ad4b2f49cb24422ac61497573780cdb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 813.666029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ad4b2f49cb24422ac61497573780cdb [ 813.778154] env[61273]: ERROR nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 33201922-1943-4bb7-92f4-71b9565e8b26, please check neutron logs for more information. 
[ 813.778154] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 813.778154] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 813.778154] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 813.778154] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 813.778154] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 813.778154] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 813.778154] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 813.778154] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.778154] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 813.778154] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.778154] env[61273]: ERROR nova.compute.manager raise self.value [ 813.778154] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 813.778154] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 813.778154] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.778154] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 813.778645] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 813.778645] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 813.778645] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 33201922-1943-4bb7-92f4-71b9565e8b26, please check neutron logs for more information. 
[ 813.778645] env[61273]: ERROR nova.compute.manager [ 813.778645] env[61273]: Traceback (most recent call last): [ 813.778645] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 813.778645] env[61273]: listener.cb(fileno) [ 813.778645] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 813.778645] env[61273]: result = function(*args, **kwargs) [ 813.778645] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 813.778645] env[61273]: return func(*args, **kwargs) [ 813.778645] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 813.778645] env[61273]: raise e [ 813.778645] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 813.778645] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 813.778645] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 813.778645] env[61273]: created_port_ids = self._update_ports_for_instance( [ 813.778645] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 813.778645] env[61273]: with excutils.save_and_reraise_exception(): [ 813.778645] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.778645] env[61273]: self.force_reraise() [ 813.778645] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.778645] env[61273]: raise self.value [ 813.778645] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 813.778645] env[61273]: updated_port = self._update_port( [ 813.778645] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.778645] env[61273]: _ensure_no_port_binding_failure(port) [ 813.778645] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 813.778645] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 813.779303] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 33201922-1943-4bb7-92f4-71b9565e8b26, please check neutron logs for more information. [ 813.779303] env[61273]: Removing descriptor: 15 [ 813.816379] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 813.843759] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 813.844001] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 813.844172] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.844352] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 813.844492] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.844657] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 813.844824] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 813.844974] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 813.845135] 
env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 813.845287] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 813.845446] env[61273]: DEBUG nova.virt.hardware [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 813.846301] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2b013a-133c-4462-a3f3-b8d94e6b2729 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.854705] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a605ae-9e64-4d73-b524-50894cd4a1a6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.868703] env[61273]: ERROR nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 33201922-1943-4bb7-92f4-71b9565e8b26, please check neutron logs for more information. 
[ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Traceback (most recent call last): [ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] yield resources [ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self.driver.spawn(context, instance, image_meta, [ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] vm_ref = self.build_virtual_machine(instance, [ 813.868703] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] vif_infos = vmwarevif.get_vif_info(self._session, [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] for vif in network_info: [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] return self._sync_wrapper(fn, *args, **kwargs) [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self.wait() [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self[:] = self._gt.wait() [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] return self._exit_event.wait() [ 813.869108] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 813.869108] env[61273]: ERROR 
nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] current.throw(*self._exc) [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] result = function(*args, **kwargs) [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] return func(*args, **kwargs) [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] raise e [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] nwinfo = self.network_api.allocate_for_instance( [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] created_port_ids = self._update_ports_for_instance( [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] with excutils.save_and_reraise_exception(): [ 813.869493] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self.force_reraise() [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] raise self.value [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] updated_port = self._update_port( [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] _ensure_no_port_binding_failure(port) [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] raise exception.PortBindingFailed(port_id=port['id']) [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] nova.exception.PortBindingFailed: Binding failed for port 33201922-1943-4bb7-92f4-71b9565e8b26, please check neutron logs for more information. [ 813.869912] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] [ 813.869912] env[61273]: INFO nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Terminating instance [ 813.871086] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "refresh_cache-ebc03a5c-984f-4d58-abb0-da555adcfbac" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.871243] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquired lock "refresh_cache-ebc03a5c-984f-4d58-abb0-da555adcfbac" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.871402] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 813.871834] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 7892a930d9a94a0393d884ac75e27dda in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 813.879853] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7892a930d9a94a0393d884ac75e27dda [ 813.928106] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Releasing lock "refresh_cache-d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.928560] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 813.928769] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 813.929074] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f40b1ce-45f6-4ae4-b88a-5cddcc13b797 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.938188] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820a83d4-4a4e-4a6f-9057-265049a64708 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.959255] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d63e20b1-e4ee-4c90-bc94-c4c05917fa1f could not be found. [ 813.959490] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 813.959674] env[61273]: INFO nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 813.959919] env[61273]: DEBUG oslo.service.loopingcall [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.960157] env[61273]: DEBUG nova.compute.manager [-] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 813.960263] env[61273]: DEBUG nova.network.neutron [-] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 813.980517] env[61273]: DEBUG nova.network.neutron [-] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.981086] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 730d25e957934018a2c1cf20ce85b4a5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 813.988062] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 730d25e957934018a2c1cf20ce85b4a5 [ 814.153729] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.154330] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 814.156101] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg baffbae9adba464980ce1ae94a31938e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 814.164104] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.632s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.164104] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg bc8abee1a7dd4f52b281dc2945db729e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 814.197070] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc8abee1a7dd4f52b281dc2945db729e [ 814.197673] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baffbae9adba464980ce1ae94a31938e [ 814.390383] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.469123] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.469123] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 42b61fd4562341fb92889726d73ca630 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 814.476294] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42b61fd4562341fb92889726d73ca630 [ 814.483023] env[61273]: DEBUG nova.network.neutron [-] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.483501] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 20877c55209641ffa528744e1f604826 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 814.492968] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20877c55209641ffa528744e1f604826 [ 814.661825] env[61273]: DEBUG nova.compute.utils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 814.662467] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg a76d44faa9924d9d8338d5832c2b825c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 814.663454] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 814.663601] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 814.679202] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a76d44faa9924d9d8338d5832c2b825c [ 814.716965] env[61273]: DEBUG nova.policy [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77bd4b765f214cb8bb602e8e52071531', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a752424d76840dabab55a9202e7a635', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 814.864657] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "9adae455-b609-4ecb-8841-43fb4d826f84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.864897] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "9adae455-b609-4ecb-8841-43fb4d826f84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.966882] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Releasing lock "refresh_cache-ebc03a5c-984f-4d58-abb0-da555adcfbac" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.967077] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 814.967281] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 814.970102] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0dc5a96c-6a1a-4556-b5ad-abf8f3525580 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.982811] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d243a6c-ed4c-45cd-a9e2-70907e7b9594 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.992765] env[61273]: INFO nova.compute.manager [-] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Took 1.03 seconds to deallocate network for instance. [ 814.993527] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f014636a-c097-4995-b592-98e7d245f1ef {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.998437] env[61273]: DEBUG nova.compute.claims [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 814.998541] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.003238] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908fd3ca-c1db-46f7-a82c-a149e3cbee1c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.010907] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ebc03a5c-984f-4d58-abb0-da555adcfbac could not be found. [ 815.011145] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 815.011331] env[61273]: INFO nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 815.011972] env[61273]: DEBUG oslo.service.loopingcall [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.012232] env[61273]: DEBUG nova.compute.manager [-] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 815.012351] env[61273]: DEBUG nova.network.neutron [-] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 815.040257] env[61273]: DEBUG nova.network.neutron [-] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 815.040926] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4f84899d071648c0ad57c6a229ad87a3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 815.042283] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd4f07e-0a37-4a3c-8ed9-138ca8ea2fd2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.050505] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f84899d071648c0ad57c6a229ad87a3 [ 815.051853] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f9d8bd-cf4c-4660-a451-a96740489bb6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.065533] env[61273]: DEBUG nova.compute.provider_tree [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.065984] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 1994ff0060fb40eb803b4e685d4a8a6f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 815.073158] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1994ff0060fb40eb803b4e685d4a8a6f [ 815.099379] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Successfully created port: ec48a1a2-a3b3-4ad6-9142-fd30912106b1 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.166978] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 815.169379] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg c29e3c4f4887484d9aa94ca517419bc5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 815.213313] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c29e3c4f4887484d9aa94ca517419bc5 [ 815.393520] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "9cedc314-173e-4686-8ee5-28c2512dbcba" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.394377] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "9cedc314-173e-4686-8ee5-28c2512dbcba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.433159] env[61273]: DEBUG nova.compute.manager [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Received event network-changed-33201922-1943-4bb7-92f4-71b9565e8b26 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 815.433362] env[61273]: DEBUG nova.compute.manager [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Refreshing instance network info cache due to event network-changed-33201922-1943-4bb7-92f4-71b9565e8b26. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 815.433573] env[61273]: DEBUG oslo_concurrency.lockutils [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] Acquiring lock "refresh_cache-ebc03a5c-984f-4d58-abb0-da555adcfbac" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.433718] env[61273]: DEBUG oslo_concurrency.lockutils [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] Acquired lock "refresh_cache-ebc03a5c-984f-4d58-abb0-da555adcfbac" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.433955] env[61273]: DEBUG nova.network.neutron [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Refreshing network info cache for port 33201922-1943-4bb7-92f4-71b9565e8b26 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 815.434336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] Expecting reply to msg ad670f45f3e3456790a1a4080e25fd8e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 815.441070] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad670f45f3e3456790a1a4080e25fd8e [ 815.546284] env[61273]: DEBUG nova.network.neutron [-] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.546798] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ac8d784f84854b5eb89017e7eb98d336 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 815.555221] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac8d784f84854b5eb89017e7eb98d336 [ 815.568891] env[61273]: DEBUG nova.scheduler.client.report [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 815.571826] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg f654a30b9db648958bf8290b8848ebfe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 815.583883] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f654a30b9db648958bf8290b8848ebfe [ 815.674204] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 
0efc0b1af00043da9c450ef190cf776b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 815.709127] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0efc0b1af00043da9c450ef190cf776b [ 815.944926] env[61273]: ERROR nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ec48a1a2-a3b3-4ad6-9142-fd30912106b1, please check neutron logs for more information. [ 815.944926] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 815.944926] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 815.944926] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 815.944926] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 815.944926] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 815.944926] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 815.944926] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 815.944926] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.944926] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 815.944926] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.944926] env[61273]: ERROR nova.compute.manager raise self.value [ 815.944926] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 815.944926] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 815.944926] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 815.944926] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 815.945389] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 815.945389] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 815.945389] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ec48a1a2-a3b3-4ad6-9142-fd30912106b1, please check neutron logs for more information. 
[ 815.945389] env[61273]: ERROR nova.compute.manager [ 815.945389] env[61273]: Traceback (most recent call last): [ 815.945389] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 815.945389] env[61273]: listener.cb(fileno) [ 815.945389] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 815.945389] env[61273]: result = function(*args, **kwargs) [ 815.945389] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 815.945389] env[61273]: return func(*args, **kwargs) [ 815.945389] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 815.945389] env[61273]: raise e [ 815.945389] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 815.945389] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 815.945389] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 815.945389] env[61273]: created_port_ids = self._update_ports_for_instance( [ 815.945389] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 815.945389] env[61273]: with excutils.save_and_reraise_exception(): [ 815.945389] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.945389] env[61273]: self.force_reraise() [ 815.945389] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.945389] env[61273]: raise self.value [ 815.945389] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 815.945389] env[61273]: updated_port = self._update_port( [ 815.945389] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 815.945389] env[61273]: _ensure_no_port_binding_failure(port) [ 815.945389] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 815.945389] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 815.946107] env[61273]: nova.exception.PortBindingFailed: Binding failed for port ec48a1a2-a3b3-4ad6-9142-fd30912106b1, please check neutron logs for more information. [ 815.946107] env[61273]: Removing descriptor: 15 [ 815.968136] env[61273]: DEBUG nova.network.neutron [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 816.048661] env[61273]: INFO nova.compute.manager [-] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Took 1.04 seconds to deallocate network for instance. 
[ 816.049796] env[61273]: DEBUG nova.network.neutron [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.050367] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] Expecting reply to msg acf06ca5a2314f50b7360ac9623e2d73 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 816.058673] env[61273]: DEBUG nova.compute.claims [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 816.058673] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.060727] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acf06ca5a2314f50b7360ac9623e2d73 [ 816.074832] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.915s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.076055] env[61273]: ERROR nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 602900da-fbec-4d44-a5ab-e4570ec93784, please check neutron logs for more information. 
[ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Traceback (most recent call last): [ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self.driver.spawn(context, instance, image_meta, [ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] vm_ref = self.build_virtual_machine(instance, [ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] vif_infos = vmwarevif.get_vif_info(self._session, [ 816.076055] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] for vif in network_info: [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] return self._sync_wrapper(fn, *args, **kwargs) [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self.wait() [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self[:] = self._gt.wait() [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] return self._exit_event.wait() [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] result = hub.switch() [ 816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
816.076407] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] return self.greenlet.switch() [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] result = function(*args, **kwargs) [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] return func(*args, **kwargs) [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] raise e [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] nwinfo = self.network_api.allocate_for_instance( [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] created_port_ids = self._update_ports_for_instance( [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] with excutils.save_and_reraise_exception(): [ 816.076755] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] self.force_reraise() [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] raise self.value [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] updated_port = self._update_port( [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] _ensure_no_port_binding_failure(port) [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] raise exception.PortBindingFailed(port_id=port['id']) [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] nova.exception.PortBindingFailed: Binding failed for port 602900da-fbec-4d44-a5ab-e4570ec93784, please check neutron logs for more information. [ 816.077272] env[61273]: ERROR nova.compute.manager [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] [ 816.077587] env[61273]: DEBUG nova.compute.utils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Binding failed for port 602900da-fbec-4d44-a5ab-e4570ec93784, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 816.077880] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.580s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.079803] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 5ec05bbbf85143158447fecf40bf4f89 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 816.092891] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Build of instance 0b400fe1-d0d0-4820-9f56-56ccbad5465a was re-scheduled: Binding failed for port 602900da-fbec-4d44-a5ab-e4570ec93784, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 816.093380] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 816.093613] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Acquiring lock "refresh_cache-0b400fe1-d0d0-4820-9f56-56ccbad5465a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.093762] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Acquired lock "refresh_cache-0b400fe1-d0d0-4820-9f56-56ccbad5465a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.095105] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 816.095568] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 8d86d444ca8a4083969b15277bbfaaf2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 816.110122] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d86d444ca8a4083969b15277bbfaaf2 [ 816.122394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ec05bbbf85143158447fecf40bf4f89 [ 816.177046] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 816.203209] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 816.203449] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 816.203606] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.203784] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 816.203933] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.204144] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 816.204365] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 816.204522] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 816.204689] env[61273]: DEBUG 
nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 816.205067] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 816.205269] env[61273]: DEBUG nova.virt.hardware [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 816.206144] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f39ee65-568c-45aa-ba76-07d235da17ab {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.214533] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d862c6-3729-4c27-b76c-8615152ac0ce {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.229666] env[61273]: ERROR nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ec48a1a2-a3b3-4ad6-9142-fd30912106b1, please check neutron logs for more information. 
[ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Traceback (most recent call last): [ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] yield resources [ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self.driver.spawn(context, instance, image_meta, [ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] vm_ref = self.build_virtual_machine(instance, [ 816.229666] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] for vif in network_info: [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] return self._sync_wrapper(fn, *args, **kwargs) [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self.wait() [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self[:] = self._gt.wait() [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] return self._exit_event.wait() [ 816.230063] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 816.230063] env[61273]: ERROR 
nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] current.throw(*self._exc) [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] result = function(*args, **kwargs) [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] return func(*args, **kwargs) [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] raise e [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] nwinfo = self.network_api.allocate_for_instance( [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] created_port_ids = self._update_ports_for_instance( [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] with excutils.save_and_reraise_exception(): [ 816.230451] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self.force_reraise() [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] raise self.value [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] updated_port = self._update_port( [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] _ensure_no_port_binding_failure(port) [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] raise exception.PortBindingFailed(port_id=port['id']) [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] nova.exception.PortBindingFailed: Binding failed for port ec48a1a2-a3b3-4ad6-9142-fd30912106b1, please check neutron logs for more information. [ 816.230837] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] [ 816.230837] env[61273]: INFO nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Terminating instance [ 816.232017] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "refresh_cache-7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.232180] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquired lock "refresh_cache-7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.232348] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 816.232759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 230437e90b6349978d892f7570290787 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 816.239134] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 230437e90b6349978d892f7570290787 [ 816.554246] env[61273]: DEBUG oslo_concurrency.lockutils [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] Releasing lock "refresh_cache-ebc03a5c-984f-4d58-abb0-da555adcfbac" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.554521] env[61273]: DEBUG nova.compute.manager [req-9985f2b6-8a6d-4851-a008-f5380dc37005 req-e6a00251-ba46-4590-bf6a-653e229b1f7f service nova] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Received event network-vif-deleted-33201922-1943-4bb7-92f4-71b9565e8b26 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 816.618635] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 816.708941] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.709624] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 9d347b984a8e41269b5d4d00d22488fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 816.721189] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d347b984a8e41269b5d4d00d22488fb [ 816.752140] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 816.850259] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.850804] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 673abe51dbda4833afd14efc8c78db46 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 816.857374] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11b1d0a-c6fb-4554-961b-b5d17455a722 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.860785] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 673abe51dbda4833afd14efc8c78db46 [ 816.866628] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21e46e9-e87d-4c36-bbaf-9d1750225801 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.897238] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715a9026-8704-4af8-a568-22a14656312f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.904436] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8cdfed-c3cc-4081-8698-55549e32e407 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.917878] env[61273]: DEBUG nova.compute.provider_tree [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Inventory has not changed in ProviderTree for provider: 
4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.918387] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 71c8e4f44ec74a09849bba702255b545 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 816.926848] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71c8e4f44ec74a09849bba702255b545 [ 817.213110] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Releasing lock "refresh_cache-0b400fe1-d0d0-4820-9f56-56ccbad5465a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.213484] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 817.213531] env[61273]: DEBUG nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 817.213682] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 817.228786] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 817.229364] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg 61c1f682ac524e71998d611d45c094a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 817.237736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61c1f682ac524e71998d611d45c094a4 [ 817.353276] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Releasing lock "refresh_cache-7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.353853] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 817.354215] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 817.354618] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2b5b10b-8c0a-4758-b1a5-729b0547aa70 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.364807] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea1e4af-204e-4523-8748-61907e9f0d5f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.385847] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab could not be found. [ 817.386168] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 817.386471] env[61273]: INFO nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 817.386807] env[61273]: DEBUG oslo.service.loopingcall [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 817.387112] env[61273]: DEBUG nova.compute.manager [-] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 817.387301] env[61273]: DEBUG nova.network.neutron [-] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 817.402141] env[61273]: DEBUG nova.network.neutron [-] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 817.402982] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 81da4f87a0b64e248efb68eeb4fcd8dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 817.409771] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81da4f87a0b64e248efb68eeb4fcd8dc [ 817.420656] env[61273]: DEBUG nova.scheduler.client.report [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 817.423098] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 5186b1ed61d0407fa0051b1a519ad247 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 817.434944] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5186b1ed61d0407fa0051b1a519ad247 [ 817.457942] env[61273]: DEBUG nova.compute.manager [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Received event network-changed-ec48a1a2-a3b3-4ad6-9142-fd30912106b1 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 817.458109] env[61273]: DEBUG nova.compute.manager [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Refreshing instance network info cache due to event network-changed-ec48a1a2-a3b3-4ad6-9142-fd30912106b1. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 817.458323] env[61273]: DEBUG oslo_concurrency.lockutils [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] Acquiring lock "refresh_cache-7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.458467] env[61273]: DEBUG oslo_concurrency.lockutils [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] Acquired lock "refresh_cache-7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.458668] env[61273]: DEBUG nova.network.neutron [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Refreshing network info cache for port ec48a1a2-a3b3-4ad6-9142-fd30912106b1 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 817.459074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] Expecting reply to msg 1172c9aeca5e498a873761bd5451439b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 817.465138] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1172c9aeca5e498a873761bd5451439b [ 817.732026] env[61273]: DEBUG nova.network.neutron [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.732565] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg a06b843490944369b76b3020c86fdd0a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 817.746219] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a06b843490944369b76b3020c86fdd0a [ 817.905808] env[61273]: DEBUG nova.network.neutron [-] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.906288] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5a0d9e33a37c4e2a993bee8c430a090b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 817.914891] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a0d9e33a37c4e2a993bee8c430a090b [ 817.925210] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.847s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.925810] env[61273]: ERROR nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 
tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port eae1b954-6dd0-4d6c-b829-c5b225294270, please check neutron logs for more information. [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Traceback (most recent call last): [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self.driver.spawn(context, instance, image_meta, [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self._vmops.spawn(context, instance, image_meta, injected_files, [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] vm_ref = self.build_virtual_machine(instance, [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] vif_infos = vmwarevif.get_vif_info(self._session, [ 817.925810] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] for vif in network_info: [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] return self._sync_wrapper(fn, *args, **kwargs) [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self.wait() [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self[:] = self._gt.wait() [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] return self._exit_event.wait() [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 817.926143] env[61273]: ERROR 
nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] current.throw(*self._exc) [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 817.926143] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] result = function(*args, **kwargs) [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] return func(*args, **kwargs) [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] raise e [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] nwinfo = self.network_api.allocate_for_instance( [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] created_port_ids = self._update_ports_for_instance( [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] with excutils.save_and_reraise_exception(): [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] self.force_reraise() [ 817.926523] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 817.926937] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] raise self.value [ 817.926937] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 817.926937] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] updated_port = self._update_port( [ 817.926937] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 817.926937] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] _ensure_no_port_binding_failure(port) [ 817.926937] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
817.926937] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] raise exception.PortBindingFailed(port_id=port['id']) [ 817.926937] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] nova.exception.PortBindingFailed: Binding failed for port eae1b954-6dd0-4d6c-b829-c5b225294270, please check neutron logs for more information. [ 817.926937] env[61273]: ERROR nova.compute.manager [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] [ 817.926937] env[61273]: DEBUG nova.compute.utils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Binding failed for port eae1b954-6dd0-4d6c-b829-c5b225294270, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 817.927935] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.024s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.930130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 5868efca89c049ecb1ee1da849a38763 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 817.931223] env[61273]: DEBUG nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Build of instance e6108eed-93b4-40a5-a61b-67aa5bbe2fda was re-scheduled: Binding failed for port eae1b954-6dd0-4d6c-b829-c5b225294270, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 817.931718] env[61273]: DEBUG nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 817.931930] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "refresh_cache-e6108eed-93b4-40a5-a61b-67aa5bbe2fda" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.932111] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquired lock "refresh_cache-e6108eed-93b4-40a5-a61b-67aa5bbe2fda" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.932278] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 817.932685] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg fd280f0037e34afc952ab6aa19290a0f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 817.938443] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd280f0037e34afc952ab6aa19290a0f [ 817.971707] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5868efca89c049ecb1ee1da849a38763 [ 817.980726] env[61273]: DEBUG nova.network.neutron [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.083286] env[61273]: DEBUG nova.network.neutron [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.083860] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] Expecting reply to msg 77c82f5ef989487988d9733828f083a0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 818.092323] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77c82f5ef989487988d9733828f083a0 [ 818.238341] env[61273]: INFO nova.compute.manager [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] [instance: 0b400fe1-d0d0-4820-9f56-56ccbad5465a] Took 1.02 seconds to deallocate network for instance. [ 818.238341] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg c138d420370d453ea7026c68b3bf945b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 818.277992] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c138d420370d453ea7026c68b3bf945b [ 818.408257] env[61273]: INFO nova.compute.manager [-] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Took 1.02 seconds to deallocate network for instance. [ 818.410755] env[61273]: DEBUG nova.compute.claims [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 818.411085] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.453826] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.537615] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.538269] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 2ffd5fc7198f4b8eb04ef2b6d4087413 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 818.546608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ffd5fc7198f4b8eb04ef2b6d4087413 [ 818.586373] env[61273]: DEBUG oslo_concurrency.lockutils [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] Releasing lock "refresh_cache-7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.586373] env[61273]: DEBUG nova.compute.manager [req-ee4b1445-16b8-41bf-9bbb-45db5c7ddb88 req-f04636fd-f17d-4fdc-a953-39cfe402ca6e service nova] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Received event network-vif-deleted-ec48a1a2-a3b3-4ad6-9142-fd30912106b1 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 818.743304] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg f218844df0714b2db77ffc435492fe7d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 818.764485] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c0a743-c3a0-4e06-b958-3228aaef8239 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.773506] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cc9b00-b821-4796-8b3e-45ddd2190a45 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.807694] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f218844df0714b2db77ffc435492fe7d [ 818.808895] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfea7a00-22eb-40a9-b111-fd3910f1d56d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.816930] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a196b7ec-4b7b-48fc-af70-995f95d49457 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.830711] env[61273]: DEBUG nova.compute.provider_tree [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.831335] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 22a4f61cb7054c139daac0635eb394e5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 818.844239] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22a4f61cb7054c139daac0635eb394e5 [ 819.040998] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Releasing lock "refresh_cache-e6108eed-93b4-40a5-a61b-67aa5bbe2fda" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.041393] env[61273]: DEBUG nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 819.041744] env[61273]: DEBUG nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 819.042045] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 819.075339] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 819.076131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg e893234d9a63456bbc1dc527fbad9b6c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 819.085652] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e893234d9a63456bbc1dc527fbad9b6c [ 819.271763] env[61273]: INFO nova.scheduler.client.report [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Deleted allocations for instance 0b400fe1-d0d0-4820-9f56-56ccbad5465a [ 819.273590] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Expecting reply to msg a7117b6e63904278a3a87b2f4732071a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 819.293506] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7117b6e63904278a3a87b2f4732071a [ 819.333940] env[61273]: DEBUG nova.scheduler.client.report [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.336882] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg dfd0522f49f1419f8e85315ce477571c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 819.347715] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfd0522f49f1419f8e85315ce477571c [ 819.579070] env[61273]: DEBUG nova.network.neutron [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.579917] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 05f69ed047b443d795d9c73575de7501 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 819.588138] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05f69ed047b443d795d9c73575de7501 [ 819.775173] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5d9d0889-4107-43cf-a46d-69501800ab7f tempest-ServerAddressesNegativeTestJSON-1057268752 
tempest-ServerAddressesNegativeTestJSON-1057268752-project-member] Lock "0b400fe1-d0d0-4820-9f56-56ccbad5465a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 123.807s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.775979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 1d7abccd37f341c7ba396f4075467e40 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 819.786205] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d7abccd37f341c7ba396f4075467e40 [ 819.840028] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.912s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.840859] env[61273]: ERROR nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c, please check neutron logs for more information. [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Traceback (most recent call last): [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self.driver.spawn(context, instance, image_meta, [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] vm_ref = self.build_virtual_machine(instance, [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] vif_infos = vmwarevif.get_vif_info(self._session, [ 819.840859] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] for vif in network_info: [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 
05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] return self._sync_wrapper(fn, *args, **kwargs) [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self.wait() [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self[:] = self._gt.wait() [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] return self._exit_event.wait() [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] current.throw(*self._exc) [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 819.841304] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] result = function(*args, **kwargs) [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] return func(*args, **kwargs) [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] raise e [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] nwinfo = self.network_api.allocate_for_instance( [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] created_port_ids = self._update_ports_for_instance( [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] with excutils.save_and_reraise_exception(): [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] self.force_reraise() [ 819.841621] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.841966] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] raise self.value [ 819.841966] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 819.841966] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] updated_port = self._update_port( [ 819.841966] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.841966] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] _ensure_no_port_binding_failure(port) [ 819.841966] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 819.841966] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] raise exception.PortBindingFailed(port_id=port['id']) [ 819.841966] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] nova.exception.PortBindingFailed: Binding failed for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c, please check neutron logs for more information. [ 819.841966] env[61273]: ERROR nova.compute.manager [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] [ 819.842229] env[61273]: DEBUG nova.compute.utils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Binding failed for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 819.843486] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.107s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.845313] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 9c9899c2d56b4755ae32891a591513d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 819.846532] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Build of instance 05901bd4-2bad-405e-8e73-f6de4393a0f8 was re-scheduled: Binding failed for port 86a1da90-c2b6-4b02-b3e4-60cb6f775b2c, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 819.846972] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 819.847189] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "refresh_cache-05901bd4-2bad-405e-8e73-f6de4393a0f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.847337] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquired lock "refresh_cache-05901bd4-2bad-405e-8e73-f6de4393a0f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.847497] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 819.847866] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg c8aa1739d8834059bd1c8d46681cb030 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 819.853794] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8aa1739d8834059bd1c8d46681cb030 [ 819.878920] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c9899c2d56b4755ae32891a591513d1 [ 820.082930] env[61273]: INFO nova.compute.manager [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: e6108eed-93b4-40a5-a61b-67aa5bbe2fda] Took 1.04 seconds to deallocate network for instance. [ 820.084873] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg c73106f21fc94835a11b8b3fd7fe5eb7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 820.121510] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c73106f21fc94835a11b8b3fd7fe5eb7 [ 820.278137] env[61273]: DEBUG nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 820.280036] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 76e1880511d74fd78b11cf6d33f45576 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 820.309344] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76e1880511d74fd78b11cf6d33f45576 [ 820.406259] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 820.452116] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.452639] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg d0aaa127bb314088b402207ea1c37a46 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 820.461190] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0aaa127bb314088b402207ea1c37a46 [ 820.589240] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 8aafb64a18d84cc8a35fb0a224c8ec3b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 820.594773] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae90ef75-3de4-43f7-b084-51aa89396b4c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.603128] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42839a87-5f87-4ac3-92ca-3811ffbc246e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.633957] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8aafb64a18d84cc8a35fb0a224c8ec3b [ 820.634844] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db38e5f9-d7f0-4ebb-b3a8-b89cc77adb2c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.642402] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef25d66-dcfe-4f9e-a836-53186a3d74d8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.655357] env[61273]: DEBUG nova.compute.provider_tree [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Inventory has not changed in 
ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.655816] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 8d15f31335dc4a698a0c61aabb5b64e4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 820.663211] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d15f31335dc4a698a0c61aabb5b64e4 [ 820.801882] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.955737] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Releasing lock "refresh_cache-05901bd4-2bad-405e-8e73-f6de4393a0f8" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.955990] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 820.956182] env[61273]: DEBUG nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 820.956383] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 820.982104] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 820.982761] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 8db15f0594344c43abceba4c1cc36b89 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 820.992170] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8db15f0594344c43abceba4c1cc36b89 [ 821.115078] env[61273]: INFO nova.scheduler.client.report [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Deleted allocations for instance e6108eed-93b4-40a5-a61b-67aa5bbe2fda [ 821.121525] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 12460e96627b4e9ebd7aeeef89543959 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 821.134159] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12460e96627b4e9ebd7aeeef89543959 [ 821.158890] env[61273]: DEBUG nova.scheduler.client.report [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 821.161883] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 7e12cbca083948e38328542b9e80e9fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 821.189776] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e12cbca083948e38328542b9e80e9fb [ 821.485259] env[61273]: DEBUG nova.network.neutron [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.485783] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg ef21b7894c1043298d5d9ad09f6c00d6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 821.497059] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef21b7894c1043298d5d9ad09f6c00d6 [ 821.623891] env[61273]: DEBUG oslo_concurrency.lockutils [None req-369926bc-5a2c-43b5-8bba-a7557c0d2358 tempest-AttachVolumeShelveTestJSON-1091491161 
tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "e6108eed-93b4-40a5-a61b-67aa5bbe2fda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 122.474s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.624509] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 021d0828fce943c58c959c9462deafe2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 821.638469] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 021d0828fce943c58c959c9462deafe2 [ 821.664844] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.821s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.665486] env[61273]: ERROR nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 640780fc-ea34-42cf-b119-e6ca43151fad, please check neutron logs for more information. [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Traceback (most recent call last): [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self.driver.spawn(context, instance, image_meta, [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] vm_ref = self.build_virtual_machine(instance, [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] vif_infos = vmwarevif.get_vif_info(self._session, [ 821.665486] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] for vif in network_info: [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] 
File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] return self._sync_wrapper(fn, *args, **kwargs) [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self.wait() [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self[:] = self._gt.wait() [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] return self._exit_event.wait() [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] current.throw(*self._exc) [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 821.665808] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] result = function(*args, **kwargs) [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] return func(*args, **kwargs) [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] raise e [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] nwinfo = self.network_api.allocate_for_instance( [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] created_port_ids = self._update_ports_for_instance( [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] with excutils.save_and_reraise_exception(): [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] self.force_reraise() [ 821.666146] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.666487] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] raise self.value [ 821.666487] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 821.666487] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] updated_port = self._update_port( [ 821.666487] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.666487] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] _ensure_no_port_binding_failure(port) [ 821.666487] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 821.666487] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] raise exception.PortBindingFailed(port_id=port['id']) [ 821.666487] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] nova.exception.PortBindingFailed: Binding failed for port 640780fc-ea34-42cf-b119-e6ca43151fad, please check neutron logs for more information. [ 821.666487] env[61273]: ERROR nova.compute.manager [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] [ 821.666487] env[61273]: DEBUG nova.compute.utils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Binding failed for port 640780fc-ea34-42cf-b119-e6ca43151fad, please check neutron logs for more information. 
{{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 821.667335] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.907s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.669530] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg fe2015ddc3e74d32989a748f6b0c7397 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 821.670318] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Build of instance e8e826d4-2463-41a7-8c63-fd9f47eceea6 was re-scheduled: Binding failed for port 640780fc-ea34-42cf-b119-e6ca43151fad, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 821.670722] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 821.670976] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquiring lock "refresh_cache-e8e826d4-2463-41a7-8c63-fd9f47eceea6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.671130] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Acquired lock "refresh_cache-e8e826d4-2463-41a7-8c63-fd9f47eceea6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.671285] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 821.671668] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 4a17640220f84818ae4e4d0bcd40e097 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 821.678232] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a17640220f84818ae4e4d0bcd40e097 [ 821.713933] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe2015ddc3e74d32989a748f6b0c7397 [ 821.988495] env[61273]: INFO 
nova.compute.manager [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: 05901bd4-2bad-405e-8e73-f6de4393a0f8] Took 1.03 seconds to deallocate network for instance. [ 821.990175] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 8bcc9bd252f245f38a829d31301dccdd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 822.032548] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bcc9bd252f245f38a829d31301dccdd [ 822.126938] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 822.128827] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 5c8e953b408d4d618c08d771b45086f1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 822.181747] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c8e953b408d4d618c08d771b45086f1 [ 822.333763] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.405978] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.406456] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 6c3694077d1940f6954787e2a6d7fb3c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 822.415128] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c3694077d1940f6954787e2a6d7fb3c [ 822.466415] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfa4a1f-4a23-42dd-9813-96ad2bc7d6d5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.475114] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5168c249-c628-4a8d-b91f-4edce99cc76c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.506434] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 20948b6e8b454bbd9c5d4dcb9691f922 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 822.513023] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca357f20-80cc-491b-a735-28667df3ef0b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.520243] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b1712e-2ac9-4cfc-aa49-c17194029cb3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.534353] env[61273]: DEBUG nova.compute.provider_tree [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.534845] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 7231421a54314262836716cc23f12024 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 822.540078] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20948b6e8b454bbd9c5d4dcb9691f922 [ 822.541601] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7231421a54314262836716cc23f12024 [ 822.648212] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 
tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.917023] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Releasing lock "refresh_cache-e8e826d4-2463-41a7-8c63-fd9f47eceea6" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.917023] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 822.917023] env[61273]: DEBUG nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 822.917023] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 822.938544] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.939175] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg c4fdf429c1e84aec8f1a07b68123d950 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 822.948371] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4fdf429c1e84aec8f1a07b68123d950 [ 823.037882] env[61273]: DEBUG nova.scheduler.client.report [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 823.040439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 002d63a94741415ea86cb914b763faec in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 823.042201] env[61273]: INFO nova.scheduler.client.report [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Deleted allocations for instance 05901bd4-2bad-405e-8e73-f6de4393a0f8 [ 823.047801] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg b8341864d94844b6a6266bad80c61bf2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 823.053473] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 002d63a94741415ea86cb914b763faec [ 823.073657] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8341864d94844b6a6266bad80c61bf2 [ 823.441620] env[61273]: DEBUG nova.network.neutron [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.442356] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 95a8d851304043dea86c5e9bd5b91c7e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 823.451389] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95a8d851304043dea86c5e9bd5b91c7e [ 823.548918] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 
tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.881s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.549565] env[61273]: ERROR nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf, please check neutron logs for more information. [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Traceback (most recent call last): [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self.driver.spawn(context, instance, image_meta, [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] vm_ref = self.build_virtual_machine(instance, [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 823.549565] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] for vif in network_info: [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] return self._sync_wrapper(fn, *args, **kwargs) [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self.wait() [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self[:] = self._gt.wait() [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] return self._exit_event.wait() [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] current.throw(*self._exc) [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 823.549909] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] result = function(*args, **kwargs) [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] return func(*args, **kwargs) [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] raise e [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] nwinfo = self.network_api.allocate_for_instance( [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] created_port_ids = self._update_ports_for_instance( [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] with excutils.save_and_reraise_exception(): [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] self.force_reraise() [ 823.550267] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 823.550639] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] raise self.value [ 823.550639] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 823.550639] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] updated_port = self._update_port( [ 823.550639] env[61273]: ERROR nova.compute.manager [instance: 
f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 823.550639] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] _ensure_no_port_binding_failure(port) [ 823.550639] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 823.550639] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] raise exception.PortBindingFailed(port_id=port['id']) [ 823.550639] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] nova.exception.PortBindingFailed: Binding failed for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf, please check neutron logs for more information. [ 823.550639] env[61273]: ERROR nova.compute.manager [instance: f6faf064-364d-4d24-9822-220bce47b3f0] [ 823.550639] env[61273]: DEBUG nova.compute.utils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Binding failed for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 823.551604] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f8be8826-1188-47f8-80e9-e95177b081f6 tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "05901bd4-2bad-405e-8e73-f6de4393a0f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.418s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.551847] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.572s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.553413] env[61273]: INFO nova.compute.claims [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.555057] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 349ee6c223cb455d920b99f930f29f3e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 823.556620] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Build of instance f6faf064-364d-4d24-9822-220bce47b3f0 was re-scheduled: Binding failed for port 34cf5a99-d31e-4c7d-aa80-312081d0f6cf, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 823.557196] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 823.557425] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Acquiring lock "refresh_cache-f6faf064-364d-4d24-9822-220bce47b3f0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.557571] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Acquired lock "refresh_cache-f6faf064-364d-4d24-9822-220bce47b3f0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.557732] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 823.558095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 34827ad8feb8471bbc6494383b265850 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 823.559015] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 027bd296094948a6be5596a0606b3730 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 823.564429] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34827ad8feb8471bbc6494383b265850 [ 823.575140] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 027bd296094948a6be5596a0606b3730 [ 823.598058] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 349ee6c223cb455d920b99f930f29f3e [ 823.944636] env[61273]: INFO nova.compute.manager [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] [instance: e8e826d4-2463-41a7-8c63-fd9f47eceea6] Took 1.03 seconds to deallocate network for instance. 
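Both build failures recorded above (instances e8e826d4-2463-41a7-8c63-fd9f47eceea6 and f6faf064-364d-4d24-9822-220bce47b3f0) terminate in the same frame: nova/network/neutron.py, _ensure_no_port_binding_failure, raising exception.PortBindingFailed before the VMware driver can assemble VIF info. The following is a minimal, self-contained sketch reconstructed only from the traceback; the exception class, the 'binding_failed' vif-type literal, and the sample port dict are illustrative assumptions, not the verbatim Nova source.

    # Sketch of the port-binding guard seen in the tracebacks above.
    # Assumption: 'port' is a Neutron port dict, and a failed binding is
    # reported as binding:vif_type == 'binding_failed' (assumed literal).

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        """Raise PortBindingFailed if Neutron reports the binding as failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # The port from the second traceback would trip the check like this:
    port = {'id': '34cf5a99-d31e-4c7d-aa80-312081d0f6cf',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

Once the exception propagates, the log shows the expected cleanup path: the claim is aborted, the build is marked "was re-scheduled", VIF unplugging is skipped (the VMware driver provides no unplug_vifs), the network is deallocated, and the placement allocations for the instance are deleted.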
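The many "Acquiring lock 'compute_resources' ... waited N.NNNs ... held N.NNNs" records come from the resource tracker serializing instance_claim and abort_instance_claim on a single per-host semaphore via oslo.concurrency. A rough sketch of that locking pattern, under the assumption that oslo.concurrency is installed and with an illustrative function name and body:

    import time

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Placeholder body: while it runs, every other claim/abort on the
        # same host queues on the same semaphore, which is the "waited N.NNNs"
        # figure in the lockutils lines above; the body's own runtime is the
        # "held N.NNNs" figure.
        time.sleep(0.1)
        return f"claimed {instance_uuid}"

    print(instance_claim('fcdd594c-b89f-4d0b-a4d5-2644b3b62b56'))

This is why a burst of failed builds shows long waits (e.g. "waited 15.907s") even though each individual claim or abort is held for only a second or two.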
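The repeated "Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb" records all carry the same inventory dict. As a rough sketch, assuming the standard Placement capacity rule (capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation), the logged numbers work out as follows:

    # Inventory as logged for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                      'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                      'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 141,
                      'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity {capacity:g}, "
              f"max {inv['max_unit']} per allocation")
    # VCPU: capacity 192, MEMORY_MB: capacity 196078, DISK_GB: capacity 400

So under these assumptions a single instance can claim at most 16 vCPUs, 65530 MB of RAM and 141 GB of disk on this node, and the 4.0 VCPU allocation ratio is what lets 48 physical cores back 192 schedulable vCPUs.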
[ 823.946337] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg f6aa1d16c4ec482bb2131f880c420b9d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 823.991392] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6aa1d16c4ec482bb2131f880c420b9d [ 824.061897] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg bf5d3867ffcf44978866a391ea4d02d4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 824.065552] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 824.067198] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 0d304619c2474cbd909ceadd3bdd0145 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 824.071197] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf5d3867ffcf44978866a391ea4d02d4 [ 824.083398] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 824.099678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d304619c2474cbd909ceadd3bdd0145 [ 824.158901] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.159415] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg ba787ff8863a44c98fa323f08d565c6c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 824.167324] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba787ff8863a44c98fa323f08d565c6c [ 824.451230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg 0cd5e17b4c41452fb7e3e69605c0947e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 824.452550] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "31ab5ebd-3df1-4e9f-bf53-69d47176da01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.452809] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "31ab5ebd-3df1-4e9f-bf53-69d47176da01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.484353] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cd5e17b4c41452fb7e3e69605c0947e [ 824.585585] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.663417] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Releasing lock "refresh_cache-f6faf064-364d-4d24-9822-220bce47b3f0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.663417] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 
tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 824.663417] env[61273]: DEBUG nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 824.663417] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 824.675782] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 824.676541] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg c164d2790bcc4b6db1ceb9439c9c2c20 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 824.684563] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c164d2790bcc4b6db1ceb9439c9c2c20 [ 824.818761] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8315ab-0599-4b74-8bf7-f74b6d57fd63 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.829968] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9cb168-4881-45f9-bec7-63dcbf6aca2d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.870975] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e053872-a42f-402a-9a90-c8bcc69e5a75 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.878431] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7c74a9-e319-49a3-8bc1-8eac3b91e693 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.891556] env[61273]: DEBUG nova.compute.provider_tree [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.892263] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 
tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 67dbf58f28bf49639d1ee3395e2d4620 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 824.901112] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67dbf58f28bf49639d1ee3395e2d4620 [ 824.974347] env[61273]: INFO nova.scheduler.client.report [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Deleted allocations for instance e8e826d4-2463-41a7-8c63-fd9f47eceea6 [ 824.981140] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Expecting reply to msg c51eebd1b67247ec8fa3c81d473bcf6d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 824.998894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c51eebd1b67247ec8fa3c81d473bcf6d [ 825.178617] env[61273]: DEBUG nova.network.neutron [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.179156] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg dcf626ee5f604552b4f1fdcc2eca45f4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 825.188403] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcf626ee5f604552b4f1fdcc2eca45f4 [ 825.398841] env[61273]: DEBUG nova.scheduler.client.report [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 825.401399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 9c3d41d77d7b4d96a8aeb8cf9716dcbc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 825.412857] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c3d41d77d7b4d96a8aeb8cf9716dcbc [ 825.488354] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0150cb50-8c5d-44e5-9eb2-cfd5475cecfd tempest-ListImageFiltersTestJSON-2140387087 tempest-ListImageFiltersTestJSON-2140387087-project-member] Lock "e8e826d4-2463-41a7-8c63-fd9f47eceea6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.873s {{(pid=61273) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.488930] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 98be7e20d12f4e7d8f346b337071ebe7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 825.499888] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98be7e20d12f4e7d8f346b337071ebe7 [ 825.681921] env[61273]: INFO nova.compute.manager [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] [instance: f6faf064-364d-4d24-9822-220bce47b3f0] Took 1.02 seconds to deallocate network for instance. [ 825.683700] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 9a7f432161634b48940903d82f277ff2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 825.729748] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a7f432161634b48940903d82f277ff2 [ 825.904106] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.904689] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 825.906474] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 49f0287005834bd092d4de40585ec92d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 825.908203] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.172s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.910044] env[61273]: INFO nova.compute.claims [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 825.911572] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg ce38d5c45f344e21a9f712eaa4394aa6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 825.940826] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49f0287005834bd092d4de40585ec92d [ 825.961789] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce38d5c45f344e21a9f712eaa4394aa6 [ 825.990955] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 825.992808] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 9a682b89f6b54459bf461d0271777022 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 826.033003] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a682b89f6b54459bf461d0271777022 [ 826.190183] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 3c9f94ba33b4450c94b1953d3b7ac64b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 826.259493] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c9f94ba33b4450c94b1953d3b7ac64b [ 826.410292] env[61273]: DEBUG nova.compute.utils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 826.410940] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 61ca3fec71e14e7d89a10ac472775116 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 826.411974] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 826.412205] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 826.415590] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 551f1aee08f14c7bb45358d1010bdcd5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 826.424175] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 551f1aee08f14c7bb45358d1010bdcd5 [ 826.454991] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61ca3fec71e14e7d89a10ac472775116 [ 826.463566] env[61273]: DEBUG nova.policy [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afd2b293ac5747749b0bae2b787277ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b8d7d7387e44003b6b4bc488c7900f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 826.514798] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.716555] env[61273]: INFO nova.scheduler.client.report [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Deleted allocations for instance f6faf064-364d-4d24-9822-220bce47b3f0 [ 826.722525] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Expecting reply to msg 53523ab35b1b41d0910a847621b2702c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 826.751132] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53523ab35b1b41d0910a847621b2702c [ 826.921405] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 826.923267] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 5b1e997bdc0f4cf7b8392a133c8858bb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 826.925127] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Successfully created port: 9a1e113b-fad2-4524-a3b5-29c7375c9242 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.978131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b1e997bdc0f4cf7b8392a133c8858bb [ 827.157363] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e495c6c9-07ed-4476-9dc0-eb430e00a7bc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.165743] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1703c2f6-bd6c-4057-94ea-0732f9dd290d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.204616] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123aec11-da91-4bb6-bcdf-e3027e0faf30 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.214647] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc59d211-fa21-411c-a86d-e6702bfe0da7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.228045] env[61273]: DEBUG nova.compute.provider_tree [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.228553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg a2a2796f47514545911d785413bab361 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 827.235678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2a2796f47514545911d785413bab361 [ 827.236231] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fb7c379d-8f2e-45eb-99d0-1b012e6f3046 tempest-ServersNegativeTestMultiTenantJSON-392229405 tempest-ServersNegativeTestMultiTenantJSON-392229405-project-member] Lock "f6faf064-364d-4d24-9822-220bce47b3f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.620s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.236736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] 
Expecting reply to msg 37ca6e8e749842b2ba8d11cd1189daa5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 827.251768] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37ca6e8e749842b2ba8d11cd1189daa5 [ 827.430314] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 11507af310df4145a4a487516b44c432 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 827.461536] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11507af310df4145a4a487516b44c432 [ 827.731172] env[61273]: DEBUG nova.scheduler.client.report [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 827.733690] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 2e5d1d1b39fd46e4b3cc34a06607856d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 827.740346] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 827.740445] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg beef196736894f579d495a7d767d728d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 827.746228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e5d1d1b39fd46e4b3cc34a06607856d [ 827.779737] env[61273]: DEBUG nova.compute.manager [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Received event network-changed-9a1e113b-fad2-4524-a3b5-29c7375c9242 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 827.779964] env[61273]: DEBUG nova.compute.manager [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Refreshing instance network info cache due to event network-changed-9a1e113b-fad2-4524-a3b5-29c7375c9242. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 827.780203] env[61273]: DEBUG oslo_concurrency.lockutils [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] Acquiring lock "refresh_cache-fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.780346] env[61273]: DEBUG oslo_concurrency.lockutils [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] Acquired lock "refresh_cache-fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.780506] env[61273]: DEBUG nova.network.neutron [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Refreshing network info cache for port 9a1e113b-fad2-4524-a3b5-29c7375c9242 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 827.780923] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] Expecting reply to msg 5a5415132af843f49ab92a47ad4d1e7f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 827.787763] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg beef196736894f579d495a7d767d728d [ 827.790170] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a5415132af843f49ab92a47ad4d1e7f [ 827.933752] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 827.960198] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 827.960447] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 827.960605] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.960779] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 827.960924] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.961072] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 827.961273] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 827.962140] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 827.962364] 
env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 827.962608] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 827.962873] env[61273]: DEBUG nova.virt.hardware [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 827.963760] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb12eb2f-847c-4520-b686-53a5f3f3906b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.974140] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f73ed1-7ce0-476a-ba1b-a5725ff1112b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.012948] env[61273]: ERROR nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9a1e113b-fad2-4524-a3b5-29c7375c9242, please check neutron logs for more information. 
[ 828.012948] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 828.012948] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 828.012948] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 828.012948] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 828.012948] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 828.012948] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 828.012948] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 828.012948] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 828.012948] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 828.012948] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 828.012948] env[61273]: ERROR nova.compute.manager raise self.value [ 828.012948] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 828.012948] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 828.012948] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 828.012948] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 828.013416] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 828.013416] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 828.013416] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9a1e113b-fad2-4524-a3b5-29c7375c9242, please check neutron logs for more information. 
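[editor's note] The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py, line 294), which raises PortBindingFailed when Neutron reports that it could not bind the port. A minimal sketch of that check follows; the exact attribute inspected ('binding:vif_type' compared against 'binding_failed') is an assumption based on Neutron's usual port-binding convention, not a quotation of the Nova source.

# Hypothetical, simplified sketch of the check named in the traceback above.
# Assumption: Neutron marks a failed binding by setting the port's
# 'binding:vif_type' attribute to 'binding_failed'.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port: dict) -> None:
    """Raise PortBindingFailed if Neutron could not bind this port."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example: a port Neutron failed to bind, as in the log entries above.
port = {'id': '9a1e113b-fad2-4524-a3b5-29c7375c9242',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 9a1e113b-..., please check neutron logs ...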
[ 828.013416] env[61273]: ERROR nova.compute.manager [ 828.013572] env[61273]: Traceback (most recent call last): [ 828.013572] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 828.013572] env[61273]: listener.cb(fileno) [ 828.013572] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 828.013572] env[61273]: result = function(*args, **kwargs) [ 828.013703] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 828.013703] env[61273]: return func(*args, **kwargs) [ 828.013703] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 828.013703] env[61273]: raise e [ 828.013703] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 828.013703] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 828.013703] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 828.013703] env[61273]: created_port_ids = self._update_ports_for_instance( [ 828.013703] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 828.013703] env[61273]: with excutils.save_and_reraise_exception(): [ 828.013703] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 828.013703] env[61273]: self.force_reraise() [ 828.013703] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 828.013703] env[61273]: raise self.value [ 828.013703] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 828.013703] env[61273]: updated_port = self._update_port( [ 828.013703] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 828.013703] env[61273]: _ensure_no_port_binding_failure(port) [ 828.013703] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 828.013703] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 828.013703] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 9a1e113b-fad2-4524-a3b5-29c7375c9242, please check neutron logs for more information. [ 828.013703] env[61273]: Removing descriptor: 19 [ 828.014792] env[61273]: ERROR nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9a1e113b-fad2-4524-a3b5-29c7375c9242, please check neutron logs for more information. 
[ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Traceback (most recent call last): [ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] yield resources [ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self.driver.spawn(context, instance, image_meta, [ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self._vmops.spawn(context, instance, image_meta, injected_files, [ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] vm_ref = self.build_virtual_machine(instance, [ 828.014792] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] vif_infos = vmwarevif.get_vif_info(self._session, [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] for vif in network_info: [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] return self._sync_wrapper(fn, *args, **kwargs) [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self.wait() [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self[:] = self._gt.wait() [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] return self._exit_event.wait() [ 828.015093] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 828.015093] env[61273]: ERROR 
nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] result = hub.switch() [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] return self.greenlet.switch() [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] result = function(*args, **kwargs) [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] return func(*args, **kwargs) [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] raise e [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] nwinfo = self.network_api.allocate_for_instance( [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] created_port_ids = self._update_ports_for_instance( [ 828.015696] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] with excutils.save_and_reraise_exception(): [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self.force_reraise() [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] raise self.value [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] updated_port = self._update_port( [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 828.016098] 
env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] _ensure_no_port_binding_failure(port) [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] raise exception.PortBindingFailed(port_id=port['id']) [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] nova.exception.PortBindingFailed: Binding failed for port 9a1e113b-fad2-4524-a3b5-29c7375c9242, please check neutron logs for more information. [ 828.016098] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] [ 828.016445] env[61273]: INFO nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Terminating instance [ 828.018110] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "refresh_cache-fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.238786] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.239304] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 828.241088] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg c0ef5d5612bd480abaa07bb4ccb06d27 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 828.248692] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.250s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.250406] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg abd2ac3dad7143b8a45ef4e2677ed528 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 828.269772] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.286461] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0ef5d5612bd480abaa07bb4ccb06d27 [ 828.300741] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abd2ac3dad7143b8a45ef4e2677ed528 [ 828.306461] env[61273]: DEBUG nova.network.neutron [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 828.419064] env[61273]: DEBUG nova.network.neutron [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.419651] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] Expecting reply to msg 315731e6c8a2407fb5d80cfed17e55c4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 828.429496] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 315731e6c8a2407fb5d80cfed17e55c4 [ 828.750567] env[61273]: DEBUG nova.compute.utils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 828.751222] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 28dc6c1984674f82bfa05a0a2be9c80a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 828.752202] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 828.752375] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 828.762787] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28dc6c1984674f82bfa05a0a2be9c80a [ 828.888084] env[61273]: DEBUG nova.policy [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c471664da5894985bf7478057ea19b73', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3de421e0f994df8b809ce0096753f23', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 828.922645] env[61273]: DEBUG oslo_concurrency.lockutils [req-a34cc2ba-ce1d-4f88-ac82-835ac8bbeff6 req-b60b5afe-30c5-4ac8-9206-1e27bdc4ed52 service nova] Releasing lock "refresh_cache-fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.922984] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquired lock 
"refresh_cache-fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.923160] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 828.924202] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 32b145f3126749a296a0c68233b81551 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 828.930485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32b145f3126749a296a0c68233b81551 [ 829.034143] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1919e81-f830-41b2-b353-49058ffcfc62 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.042413] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63de2df8-ab22-4aed-8fc1-d86a0918a0dc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.082277] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1062d88-6632-4016-8807-2a22f86bca24 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.092317] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd448a9-fd31-4da1-a2a8-22bdcd80c3a4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.104089] env[61273]: DEBUG nova.compute.provider_tree [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.104644] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 396417b8a2e64f9b83fded72b2c48a90 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 829.113436] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 396417b8a2e64f9b83fded72b2c48a90 [ 829.265404] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 829.265404] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 3218917b152d4db5b725fb45331d95a7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 829.300587] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3218917b152d4db5b725fb45331d95a7 [ 829.441947] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 829.516624] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.517226] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg c72d2544fd324698a992b2a9fe85cf06 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 829.525089] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c72d2544fd324698a992b2a9fe85cf06 [ 829.534567] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Successfully created port: 2c33529b-82e4-4376-a0ac-52e2aa5b95c6 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.606992] env[61273]: DEBUG nova.scheduler.client.report [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 829.609537] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg eb37748e4b844641a53a12149e4894a2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 829.623201] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb37748e4b844641a53a12149e4894a2 [ 829.771550] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 
c95020cdfca64e3e893fbe2ad057e561 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 829.808435] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c95020cdfca64e3e893fbe2ad057e561 [ 829.953599] env[61273]: DEBUG nova.compute.manager [req-2606c1d8-b151-4141-8402-89db32695220 req-6aeb5a53-ffc2-4bff-9141-c774625fdba0 service nova] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Received event network-vif-deleted-9a1e113b-fad2-4524-a3b5-29c7375c9242 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 830.022944] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Releasing lock "refresh_cache-fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.022944] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 830.022944] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 830.022944] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a2d2fdc-6668-42bc-9200-26f227f2048c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.031764] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6c5fb0-1d56-4b13-ae54-993b32865dfc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.057558] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fcdd594c-b89f-4d0b-a4d5-2644b3b62b56 could not be found. [ 830.057974] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 830.058489] env[61273]: INFO nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Took 0.04 seconds to destroy the instance on the hypervisor. 
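[editor's note] For orientation, the inventory dict in the scheduler report entries above translates into usable capacity per resource class. The formula used here, usable = (total - reserved) * allocation_ratio, is the standard Placement interpretation and is assumed rather than taken from the log itself.

# Hedged worked example: deriving usable capacity from the inventory
# reported above for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {usable:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400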
[ 830.059111] env[61273]: DEBUG oslo.service.loopingcall [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.059478] env[61273]: DEBUG nova.compute.manager [-] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 830.059836] env[61273]: DEBUG nova.network.neutron [-] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 830.085864] env[61273]: DEBUG nova.network.neutron [-] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 830.085864] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2d67811aa68740089b77c3d9613e3c82 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 830.095199] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d67811aa68740089b77c3d9613e3c82 [ 830.112337] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.864s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.113004] env[61273]: ERROR nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d, please check neutron logs for more information. 
[ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Traceback (most recent call last): [ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self.driver.spawn(context, instance, image_meta, [ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] vm_ref = self.build_virtual_machine(instance, [ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] vif_infos = vmwarevif.get_vif_info(self._session, [ 830.113004] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] for vif in network_info: [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] return self._sync_wrapper(fn, *args, **kwargs) [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self.wait() [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self[:] = self._gt.wait() [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] return self._exit_event.wait() [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] current.throw(*self._exc) [ 830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
830.113329] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] result = function(*args, **kwargs) [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] return func(*args, **kwargs) [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] raise e [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] nwinfo = self.network_api.allocate_for_instance( [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] created_port_ids = self._update_ports_for_instance( [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] with excutils.save_and_reraise_exception(): [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] self.force_reraise() [ 830.113663] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.114026] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] raise self.value [ 830.114026] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 830.114026] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] updated_port = self._update_port( [ 830.114026] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.114026] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] _ensure_no_port_binding_failure(port) [ 830.114026] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 830.114026] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] raise exception.PortBindingFailed(port_id=port['id']) [ 830.114026] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] nova.exception.PortBindingFailed: Binding failed for 
port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d, please check neutron logs for more information. [ 830.114026] env[61273]: ERROR nova.compute.manager [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] [ 830.114026] env[61273]: DEBUG nova.compute.utils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Binding failed for port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 830.115404] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Build of instance d63e20b1-e4ee-4c90-bc94-c4c05917fa1f was re-scheduled: Binding failed for port 56e44824-11a2-4db8-8e08-d6d6b0ecf51d, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 830.115818] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 830.116065] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Acquiring lock "refresh_cache-d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.116219] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Acquired lock "refresh_cache-d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.116447] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 830.116769] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg f2629f23a457416eb7f6846b5547f5af in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 830.117619] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.065s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.120053] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 
tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 172461ebc31e422d891b91f6e2632d66 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 830.132712] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2629f23a457416eb7f6846b5547f5af [ 830.165476] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 172461ebc31e422d891b91f6e2632d66 [ 830.276668] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 830.313540] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 830.313809] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 830.313980] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.314219] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 830.314358] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.314502] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 830.314700] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 
tempest-ImagesTestJSON-1894548440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 830.314931] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 830.315164] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 830.315337] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 830.317821] env[61273]: DEBUG nova.virt.hardware [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 830.317821] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd871548-3696-4234-a61c-fc44697299fe {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.330010] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f6bc3c-def6-4eed-b907-ee182952d520 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.404027] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Acquiring lock "7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.410639] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Lock "7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.007s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.587630] env[61273]: DEBUG nova.network.neutron [-] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.588139] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b2d94f03de0a42a89c7130019a55c787 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 830.597077] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg b2d94f03de0a42a89c7130019a55c787 [ 830.638667] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 830.842209] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.842845] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 84ab3ecb1c9f4905be572a02ec4d9ada in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 830.855816] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84ab3ecb1c9f4905be572a02ec4d9ada [ 830.923988] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7e86aa-b462-426d-bbe0-f5287bff365f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.933432] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7befd5-5526-407b-822e-266ec7f9a4b5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.963719] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9862dd4-d4ef-4e2e-9c02-92d6a5fd936a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.976119] env[61273]: ERROR nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6, please check neutron logs for more information. 
[ 830.976119] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 830.976119] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 830.976119] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 830.976119] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 830.976119] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 830.976119] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 830.976119] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 830.976119] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.976119] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 830.976119] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.976119] env[61273]: ERROR nova.compute.manager raise self.value [ 830.976119] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 830.976119] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 830.976119] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.976119] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 830.976544] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 830.976544] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 830.976544] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6, please check neutron logs for more information. 
[ 830.976544] env[61273]: ERROR nova.compute.manager [ 830.976544] env[61273]: Traceback (most recent call last): [ 830.976544] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 830.976544] env[61273]: listener.cb(fileno) [ 830.976544] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 830.976544] env[61273]: result = function(*args, **kwargs) [ 830.976544] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 830.976544] env[61273]: return func(*args, **kwargs) [ 830.976544] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 830.976544] env[61273]: raise e [ 830.976544] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 830.976544] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 830.976544] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 830.976544] env[61273]: created_port_ids = self._update_ports_for_instance( [ 830.976544] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 830.976544] env[61273]: with excutils.save_and_reraise_exception(): [ 830.976544] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.976544] env[61273]: self.force_reraise() [ 830.976544] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.976544] env[61273]: raise self.value [ 830.976544] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 830.976544] env[61273]: updated_port = self._update_port( [ 830.976544] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.976544] env[61273]: _ensure_no_port_binding_failure(port) [ 830.976544] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 830.976544] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 830.977230] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6, please check neutron logs for more information. [ 830.977230] env[61273]: Removing descriptor: 19 [ 830.979045] env[61273]: ERROR nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6, please check neutron logs for more information. 
[ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] Traceback (most recent call last): [ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] yield resources [ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] self.driver.spawn(context, instance, image_meta, [ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] self._vmops.spawn(context, instance, image_meta, injected_files, [ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] vm_ref = self.build_virtual_machine(instance, [ 830.979045] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] vif_infos = vmwarevif.get_vif_info(self._session, [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] for vif in network_info: [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] return self._sync_wrapper(fn, *args, **kwargs) [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] self.wait() [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] self[:] = self._gt.wait() [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] return self._exit_event.wait() [ 830.979364] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 830.979364] env[61273]: ERROR 
nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] result = hub.switch() [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] return self.greenlet.switch() [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] result = function(*args, **kwargs) [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] return func(*args, **kwargs) [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] raise e [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] nwinfo = self.network_api.allocate_for_instance( [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] created_port_ids = self._update_ports_for_instance( [ 830.979737] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] with excutils.save_and_reraise_exception(): [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] self.force_reraise() [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] raise self.value [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] updated_port = self._update_port( [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.980330] 
env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] _ensure_no_port_binding_failure(port) [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] raise exception.PortBindingFailed(port_id=port['id']) [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] nova.exception.PortBindingFailed: Binding failed for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6, please check neutron logs for more information. [ 830.980330] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] [ 830.980813] env[61273]: INFO nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Terminating instance [ 830.981904] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403f48ec-e13e-4bcc-be03-ae234eb9039f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.986161] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "refresh_cache-6494039f-3716-4174-92c0-15df384e0878" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.986310] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquired lock "refresh_cache-6494039f-3716-4174-92c0-15df384e0878" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.986465] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 830.987034] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 8ea99d46525e420393eb0bad85af58a9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 830.999511] env[61273]: DEBUG nova.compute.provider_tree [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.000071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 2521f22822764209abd30bbef7038b18 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 831.005646] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
8ea99d46525e420393eb0bad85af58a9 [ 831.009123] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2521f22822764209abd30bbef7038b18 [ 831.090480] env[61273]: INFO nova.compute.manager [-] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Took 1.03 seconds to deallocate network for instance. [ 831.092864] env[61273]: DEBUG nova.compute.claims [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 831.093052] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.351446] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Releasing lock "refresh_cache-d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.351760] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 831.351963] env[61273]: DEBUG nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 831.352173] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 831.368324] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 831.368919] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg f482b3b6d1e34400918dd3d4220142d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 831.376964] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f482b3b6d1e34400918dd3d4220142d1 [ 831.507398] env[61273]: DEBUG nova.scheduler.client.report [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 831.509823] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 11ab4483cfd64603b3272c78b84f1a3a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 831.511311] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 831.523354] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11ab4483cfd64603b3272c78b84f1a3a [ 831.625936] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.626542] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 45c4485f64064b3cbf4d362ec9013c17 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 831.641451] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45c4485f64064b3cbf4d362ec9013c17 [ 831.871810] env[61273]: DEBUG nova.network.neutron [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.872390] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 4602bdba2e9649e6a60c7e569ba2ac06 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 831.882456] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4602bdba2e9649e6a60c7e569ba2ac06 [ 831.980758] env[61273]: DEBUG nova.compute.manager [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] [instance: 6494039f-3716-4174-92c0-15df384e0878] Received event network-changed-2c33529b-82e4-4376-a0ac-52e2aa5b95c6 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 831.980941] env[61273]: DEBUG nova.compute.manager [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] [instance: 6494039f-3716-4174-92c0-15df384e0878] Refreshing instance network info cache due to event network-changed-2c33529b-82e4-4376-a0ac-52e2aa5b95c6. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 831.981133] env[61273]: DEBUG oslo_concurrency.lockutils [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] Acquiring lock "refresh_cache-6494039f-3716-4174-92c0-15df384e0878" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.014948] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.897s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.015590] env[61273]: ERROR nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 33201922-1943-4bb7-92f4-71b9565e8b26, please check neutron logs for more information. [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Traceback (most recent call last): [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self.driver.spawn(context, instance, image_meta, [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] vm_ref = self.build_virtual_machine(instance, [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] vif_infos = vmwarevif.get_vif_info(self._session, [ 832.015590] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] for vif in network_info: [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] return self._sync_wrapper(fn, *args, **kwargs) [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/model.py", line 603, in 
_sync_wrapper [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self.wait() [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self[:] = self._gt.wait() [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] return self._exit_event.wait() [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] current.throw(*self._exc) [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 832.015971] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] result = function(*args, **kwargs) [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] return func(*args, **kwargs) [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] raise e [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] nwinfo = self.network_api.allocate_for_instance( [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] created_port_ids = self._update_ports_for_instance( [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] with excutils.save_and_reraise_exception(): [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] self.force_reraise() [ 832.016332] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, 
in force_reraise [ 832.016665] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] raise self.value [ 832.016665] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 832.016665] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] updated_port = self._update_port( [ 832.016665] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 832.016665] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] _ensure_no_port_binding_failure(port) [ 832.016665] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 832.016665] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] raise exception.PortBindingFailed(port_id=port['id']) [ 832.016665] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] nova.exception.PortBindingFailed: Binding failed for port 33201922-1943-4bb7-92f4-71b9565e8b26, please check neutron logs for more information. [ 832.016665] env[61273]: ERROR nova.compute.manager [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] [ 832.016665] env[61273]: DEBUG nova.compute.utils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Binding failed for port 33201922-1943-4bb7-92f4-71b9565e8b26, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 832.017659] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.607s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.019562] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 2be5a56eeaef48fea95546d30f5829f9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.021246] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Build of instance ebc03a5c-984f-4d58-abb0-da555adcfbac was re-scheduled: Binding failed for port 33201922-1943-4bb7-92f4-71b9565e8b26, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 832.021771] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 832.021997] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquiring lock "refresh_cache-ebc03a5c-984f-4d58-abb0-da555adcfbac" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.022141] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Acquired lock "refresh_cache-ebc03a5c-984f-4d58-abb0-da555adcfbac" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.022294] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 832.022661] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 5149d1f4c268425cad59c150c98a9cf7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.028894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5149d1f4c268425cad59c150c98a9cf7 [ 832.063342] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2be5a56eeaef48fea95546d30f5829f9 [ 832.128586] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Releasing lock "refresh_cache-6494039f-3716-4174-92c0-15df384e0878" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.129135] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 832.129341] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 832.130018] env[61273]: DEBUG oslo_concurrency.lockutils [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] Acquired lock "refresh_cache-6494039f-3716-4174-92c0-15df384e0878" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.130203] env[61273]: DEBUG nova.network.neutron [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] [instance: 6494039f-3716-4174-92c0-15df384e0878] Refreshing network info cache for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 832.130786] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] Expecting reply to msg f12165ad44f849c3a1e468d77421b192 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.131795] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c321ee9e-685d-4e0d-8ca6-fbebfd9d5dd5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.140574] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca1cefc-48b9-428f-9919-b180b341c5d2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.151294] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f12165ad44f849c3a1e468d77421b192 [ 832.162809] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6494039f-3716-4174-92c0-15df384e0878 could not be found. [ 832.163032] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 832.163204] env[61273]: INFO nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Took 0.03 seconds to destroy the instance on the hypervisor. [ 832.163447] env[61273]: DEBUG oslo.service.loopingcall [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.163658] env[61273]: DEBUG nova.compute.manager [-] [instance: 6494039f-3716-4174-92c0-15df384e0878] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 832.163746] env[61273]: DEBUG nova.network.neutron [-] [instance: 6494039f-3716-4174-92c0-15df384e0878] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 832.183923] env[61273]: DEBUG nova.network.neutron [-] [instance: 6494039f-3716-4174-92c0-15df384e0878] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.190485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 469e16d939544cb397364a592f237d79 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.191033] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 469e16d939544cb397364a592f237d79 [ 832.374618] env[61273]: INFO nova.compute.manager [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] [instance: d63e20b1-e4ee-4c90-bc94-c4c05917fa1f] Took 1.02 seconds to deallocate network for instance. [ 832.376468] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg b75c7ea03087492fa9123c9b9b631717 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.413190] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b75c7ea03087492fa9123c9b9b631717 [ 832.571482] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.662931] env[61273]: DEBUG nova.network.neutron [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] [instance: 6494039f-3716-4174-92c0-15df384e0878] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.680978] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.681623] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 3f006dabfa4a4dcb80747e8c52e00efc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.686418] env[61273]: DEBUG nova.network.neutron [-] [instance: 6494039f-3716-4174-92c0-15df384e0878] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.686808] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 55cfb4a7ae3a48a692787ae380f6a76c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.705894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f006dabfa4a4dcb80747e8c52e00efc [ 832.706432] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55cfb4a7ae3a48a692787ae380f6a76c [ 832.761609] env[61273]: DEBUG nova.network.neutron [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] [instance: 6494039f-3716-4174-92c0-15df384e0878] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.762240] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] Expecting reply to msg 88555bc3a15a44a9bf753f6aa9f198b9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.778373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88555bc3a15a44a9bf753f6aa9f198b9 [ 832.852712] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e464ea9-9315-4e65-b06a-24a2ddb7db0b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.860351] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0c6ed5-4d0a-414e-962d-597454853381 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.891889] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 10ca3cbbd18f49c8a86b9f036e9e167c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.893870] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb65edd-e1ac-4e82-af26-40ae97042566 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.901558] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2cbc56-a009-47ef-b97d-5c9e83457950 
{{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.914510] env[61273]: DEBUG nova.compute.provider_tree [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.915170] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg a1ae2f8fa9f3478fa6d0e50981034c97 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 832.922237] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1ae2f8fa9f3478fa6d0e50981034c97 [ 832.958625] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10ca3cbbd18f49c8a86b9f036e9e167c [ 833.183895] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Releasing lock "refresh_cache-ebc03a5c-984f-4d58-abb0-da555adcfbac" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.184169] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 833.184359] env[61273]: DEBUG nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 833.184524] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 833.196860] env[61273]: INFO nova.compute.manager [-] [instance: 6494039f-3716-4174-92c0-15df384e0878] Took 1.03 seconds to deallocate network for instance. [ 833.199221] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.199731] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 975189d00d574966bd16b6772caa6d4e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 833.200685] env[61273]: DEBUG nova.compute.claims [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 833.200847] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.205731] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 975189d00d574966bd16b6772caa6d4e [ 833.264920] env[61273]: DEBUG oslo_concurrency.lockutils [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] Releasing lock "refresh_cache-6494039f-3716-4174-92c0-15df384e0878" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.265187] env[61273]: DEBUG nova.compute.manager [req-6ba79080-bb22-4091-83f3-d92cb958558a req-3687ca9a-6977-4c02-b8ba-6c0683dd60eb service nova] [instance: 6494039f-3716-4174-92c0-15df384e0878] Received event network-vif-deleted-2c33529b-82e4-4376-a0ac-52e2aa5b95c6 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 833.419681] env[61273]: INFO nova.scheduler.client.report [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Deleted allocations for instance d63e20b1-e4ee-4c90-bc94-c4c05917fa1f [ 833.425845] env[61273]: DEBUG nova.scheduler.client.report [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 833.428250] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 8c3df7189b7a4794ac3f05f277af0461 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 833.429767] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Expecting reply to msg 
a44332c2f14b43268fdaa3c31cfef405 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 833.440446] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c3df7189b7a4794ac3f05f277af0461 [ 833.449679] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a44332c2f14b43268fdaa3c31cfef405 [ 833.702720] env[61273]: DEBUG nova.network.neutron [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.703300] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg b2cbcb56ff384ccf816078939a19f0ae in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 833.713980] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2cbcb56ff384ccf816078939a19f0ae [ 833.932106] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.914s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.932789] env[61273]: ERROR nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ec48a1a2-a3b3-4ad6-9142-fd30912106b1, please check neutron logs for more information. 
[ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Traceback (most recent call last): [ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self.driver.spawn(context, instance, image_meta, [ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] vm_ref = self.build_virtual_machine(instance, [ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 833.932789] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] for vif in network_info: [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] return self._sync_wrapper(fn, *args, **kwargs) [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self.wait() [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self[:] = self._gt.wait() [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] return self._exit_event.wait() [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] current.throw(*self._exc) [ 833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
833.933146] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] result = function(*args, **kwargs) [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] return func(*args, **kwargs) [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] raise e [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] nwinfo = self.network_api.allocate_for_instance( [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] created_port_ids = self._update_ports_for_instance( [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] with excutils.save_and_reraise_exception(): [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] self.force_reraise() [ 833.933693] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.934036] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] raise self.value [ 833.934036] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 833.934036] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] updated_port = self._update_port( [ 833.934036] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.934036] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] _ensure_no_port_binding_failure(port) [ 833.934036] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.934036] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] raise exception.PortBindingFailed(port_id=port['id']) [ 833.934036] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] nova.exception.PortBindingFailed: Binding failed for 
port ec48a1a2-a3b3-4ad6-9142-fd30912106b1, please check neutron logs for more information. [ 833.934036] env[61273]: ERROR nova.compute.manager [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] [ 833.934036] env[61273]: DEBUG nova.compute.utils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Binding failed for port ec48a1a2-a3b3-4ad6-9142-fd30912106b1, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 833.934784] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7a79b3f3-a343-4dd8-a252-83c0b573d07c tempest-ServerExternalEventsTest-1934076090 tempest-ServerExternalEventsTest-1934076090-project-member] Lock "d63e20b1-e4ee-4c90-bc94-c4c05917fa1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.459s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.935035] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.134s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.936876] env[61273]: INFO nova.compute.claims [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.938498] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg cbb7da69e8a44db98151688e4845d680 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 833.939740] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 42661e9018034c1ca96382da85034600 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 833.940582] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Build of instance 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab was re-scheduled: Binding failed for port ec48a1a2-a3b3-4ad6-9142-fd30912106b1, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 833.941094] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 833.941315] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "refresh_cache-7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.941461] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquired lock "refresh_cache-7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.941618] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 833.941990] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 3c7a96c1203045f6a3315ba200d3e972 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 833.953090] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42661e9018034c1ca96382da85034600 [ 833.956152] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c7a96c1203045f6a3315ba200d3e972 [ 833.981101] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbb7da69e8a44db98151688e4845d680 [ 834.207571] env[61273]: INFO nova.compute.manager [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] [instance: ebc03a5c-984f-4d58-abb0-da555adcfbac] Took 1.02 seconds to deallocate network for instance. 
[ 834.207728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg f24199dbcf874aa1ba900dacd5b586e5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 834.242150] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f24199dbcf874aa1ba900dacd5b586e5 [ 834.444880] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg cabaa9b3bbeb4dab9e5f631ffdceae64 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 834.446369] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 834.447443] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 692e30df96cd44a8a6c30dc0b5bf8d91 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 834.453795] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cabaa9b3bbeb4dab9e5f631ffdceae64 [ 834.464613] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.494278] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 692e30df96cd44a8a6c30dc0b5bf8d91 [ 834.560059] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.560600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg d1e47a734ebc44e19dc6dc0d2174e894 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 834.568580] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1e47a734ebc44e19dc6dc0d2174e894 [ 834.712485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg f51a866730ab4c689807470577600028 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 834.744741] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f51a866730ab4c689807470577600028 [ 834.979816] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.064340] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Releasing lock "refresh_cache-7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.064797] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 835.064797] env[61273]: DEBUG nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 835.064953] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 835.082107] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 835.082305] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg f5b93cbaa11e4791a97340be76ead4f5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 835.094690] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5b93cbaa11e4791a97340be76ead4f5 [ 835.194952] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5925a3-eb8c-4e61-81eb-47089ab91757 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.202285] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1851539e-aa59-4154-9d32-0142ca6bd7c4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.236441] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37aa566-1468-47bd-b9ac-683867183bba {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.242177] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98580ef-5294-4150-abb8-3027c2a36619 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.255551] env[61273]: DEBUG nova.compute.provider_tree [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.256089] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg a0f6af140b704a3f82c7bf98f968f830 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 835.257608] env[61273]: INFO nova.scheduler.client.report [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 
tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Deleted allocations for instance ebc03a5c-984f-4d58-abb0-da555adcfbac [ 835.263828] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0f6af140b704a3f82c7bf98f968f830 [ 835.264941] env[61273]: DEBUG nova.scheduler.client.report [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 835.267706] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 96f6620298d54435902a951c3ba4bb1a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 835.268596] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Expecting reply to msg 3989071c1d904074a83610e8ca8a1cd8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 835.280545] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96f6620298d54435902a951c3ba4bb1a [ 835.284997] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3989071c1d904074a83610e8ca8a1cd8 [ 835.585703] env[61273]: DEBUG nova.network.neutron [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.585703] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 8aa3db75308c40c7a871925bd8916ed5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 835.597763] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8aa3db75308c40c7a871925bd8916ed5 [ 835.774316] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.835s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.774316] env[61273]: DEBUG nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 835.774316] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg c5581005ad0a446e9afd2500742c1386 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 835.774316] env[61273]: DEBUG oslo_concurrency.lockutils [None req-31d5c201-1a1c-4b46-b0ba-a40cb1ce0199 tempest-VolumesAdminNegativeTest-1865414953 tempest-VolumesAdminNegativeTest-1865414953-project-member] Lock "ebc03a5c-984f-4d58-abb0-da555adcfbac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.465s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.774822] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.126s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.775415] env[61273]: INFO nova.compute.claims [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.776859] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 500450495a4443b0a9fdcb647f44029b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 835.778080] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 06dc20bde6254e52b81bd4c1378a38b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 835.799132] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06dc20bde6254e52b81bd4c1378a38b4 [ 835.832901] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5581005ad0a446e9afd2500742c1386 [ 835.841902] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 500450495a4443b0a9fdcb647f44029b [ 836.087656] env[61273]: INFO nova.compute.manager [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab] Took 1.02 seconds to deallocate network for instance. 
[ 836.089859] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 7358ed85111b4d66943bff7835e31ba7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 836.101940] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.102389] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.102936] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg e829fd02a4294dc4b47e92947b86b52b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 836.115518] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e829fd02a4294dc4b47e92947b86b52b [ 836.126413] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7358ed85111b4d66943bff7835e31ba7 [ 836.275726] env[61273]: DEBUG nova.compute.utils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.276397] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 6f10c2b900bb4bc59b677179cb5b513c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 836.277350] env[61273]: DEBUG nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 836.277521] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 836.282132] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 8b79f284567e400aa35dbad739bd2aab in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 836.282132] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 836.283029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg fdfc05b826824a36add956492703cc67 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 836.288662] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b79f284567e400aa35dbad739bd2aab [ 836.319427] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f10c2b900bb4bc59b677179cb5b513c [ 836.322911] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdfc05b826824a36add956492703cc67 [ 836.333566] env[61273]: DEBUG nova.policy [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a8248232ec34833b3e19f7dfd5d63f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f39358f90b3f468bbf87a5719c76273d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 836.589744] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Successfully created port: 0f8ef003-3fad-4161-b909-098c989850aa {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.595347] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 9bfada3ac674457c8561a10c769be622 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 836.614136] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.614309] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Starting heal instance info cache {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 836.614426] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Rebuilding the list of instances to heal {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 836.615049] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 398e523b390a471a9f734feb10b1e78b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 836.627655] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 398e523b390a471a9f734feb10b1e78b [ 836.631093] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bfada3ac674457c8561a10c769be622 [ 836.780336] env[61273]: DEBUG 
nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 836.782125] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 8ca53df02a3a45f69110b3ce6158de21 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 836.815485] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.845098] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ca53df02a3a45f69110b3ce6158de21 [ 837.047824] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0789308-cd99-42f9-864b-019ad0c2826c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.060017] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4217f0-1f56-4e5b-8a86-c86233212995 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.120359] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 837.120604] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 6494039f-3716-4174-92c0-15df384e0878] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 837.120813] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 837.121017] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 837.121215] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Didn't find any instances for network info cache update. 
{{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 837.122282] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324e7dde-7708-4ddd-8da3-97687810b142 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.126683] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.126953] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.127714] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.127964] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.128213] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.128445] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.128645] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61273) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 837.128867] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager.update_available_resource {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.129332] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 25e110f64ac348e4b09a10c853811a8c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 837.135009] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94487ad8-815a-4e69-9f5a-8d887e07324d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.141553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25e110f64ac348e4b09a10c853811a8c [ 837.144681] env[61273]: INFO nova.scheduler.client.report [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Deleted allocations for instance 7327b3d9-6f7e-4203-b77b-bc0271e3a6ab [ 837.155336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 5d874d4575bc47e7ba490be21fabefd3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 837.171264] env[61273]: DEBUG nova.compute.provider_tree [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.171264] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg edc07225386240fa838cdf29762ebfa9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 837.173634] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d874d4575bc47e7ba490be21fabefd3 [ 837.190448] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edc07225386240fa838cdf29762ebfa9 [ 837.288042] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 66e4e3c1f2a842d9bd71e9b8cc12ad9f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 837.337573] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66e4e3c1f2a842d9bd71e9b8cc12ad9f [ 837.635179] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.674058] env[61273]: DEBUG oslo_concurrency.lockutils [None req-0beaa677-c31c-4323-8d74-cc333845720c tempest-AttachInterfacesTestJSON-669022114 
tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "7327b3d9-6f7e-4203-b77b-bc0271e3a6ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.109s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.674875] env[61273]: DEBUG nova.scheduler.client.report [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 837.678191] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg d132bcd76aee48ea9c3cecf83b249524 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 837.678568] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 772d5651251444aa90cbbe8f8c2d4545 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 837.690830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d132bcd76aee48ea9c3cecf83b249524 [ 837.691628] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 772d5651251444aa90cbbe8f8c2d4545 [ 837.774998] env[61273]: DEBUG nova.compute.manager [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Received event network-changed-0f8ef003-3fad-4161-b909-098c989850aa {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 837.775140] env[61273]: DEBUG nova.compute.manager [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Refreshing instance network info cache due to event network-changed-0f8ef003-3fad-4161-b909-098c989850aa. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 837.775352] env[61273]: DEBUG oslo_concurrency.lockutils [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] Acquiring lock "refresh_cache-9debd209-244f-472a-b9d6-cf63bba98839" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.775489] env[61273]: DEBUG oslo_concurrency.lockutils [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] Acquired lock "refresh_cache-9debd209-244f-472a-b9d6-cf63bba98839" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.775645] env[61273]: DEBUG nova.network.neutron [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Refreshing network info cache for port 0f8ef003-3fad-4161-b909-098c989850aa {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 837.776130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] Expecting reply to msg 6024d52a42804028b28a7323e8b62b45 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 837.782381] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6024d52a42804028b28a7323e8b62b45 [ 837.791446] env[61273]: DEBUG nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 837.820281] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.820613] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.820695] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.820882] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.821442] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.821442] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.821442] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.821589] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 837.821734] env[61273]: DEBUG 
nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.821890] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.822098] env[61273]: DEBUG nova.virt.hardware [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.823283] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3f10af-31a9-4fc4-a6f8-434ab1c08518 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.833269] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593f6d1a-0b75-4a73-b79d-9a75052a157a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.057582] env[61273]: ERROR nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0f8ef003-3fad-4161-b909-098c989850aa, please check neutron logs for more information. 
[ 838.057582] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 838.057582] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 838.057582] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 838.057582] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 838.057582] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 838.057582] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 838.057582] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 838.057582] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.057582] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 838.057582] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.057582] env[61273]: ERROR nova.compute.manager raise self.value [ 838.057582] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 838.057582] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 838.057582] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.057582] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 838.058101] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 838.058101] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 838.058101] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0f8ef003-3fad-4161-b909-098c989850aa, please check neutron logs for more information. 
[ 838.058101] env[61273]: ERROR nova.compute.manager [ 838.058101] env[61273]: Traceback (most recent call last): [ 838.058101] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 838.058101] env[61273]: listener.cb(fileno) [ 838.058101] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 838.058101] env[61273]: result = function(*args, **kwargs) [ 838.058101] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 838.058101] env[61273]: return func(*args, **kwargs) [ 838.058101] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 838.058101] env[61273]: raise e [ 838.058101] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 838.058101] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 838.058101] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 838.058101] env[61273]: created_port_ids = self._update_ports_for_instance( [ 838.058101] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 838.058101] env[61273]: with excutils.save_and_reraise_exception(): [ 838.058101] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.058101] env[61273]: self.force_reraise() [ 838.058101] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.058101] env[61273]: raise self.value [ 838.058101] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 838.058101] env[61273]: updated_port = self._update_port( [ 838.058101] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.058101] env[61273]: _ensure_no_port_binding_failure(port) [ 838.058101] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 838.058101] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 838.058980] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 0f8ef003-3fad-4161-b909-098c989850aa, please check neutron logs for more information. [ 838.058980] env[61273]: Removing descriptor: 15 [ 838.058980] env[61273]: ERROR nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0f8ef003-3fad-4161-b909-098c989850aa, please check neutron logs for more information. 
[ 838.058980] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Traceback (most recent call last): [ 838.058980] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 838.058980] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] yield resources [ 838.058980] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 838.058980] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self.driver.spawn(context, instance, image_meta, [ 838.058980] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 838.058980] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self._vmops.spawn(context, instance, image_meta, injected_files, [ 838.058980] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 838.058980] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] vm_ref = self.build_virtual_machine(instance, [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] vif_infos = vmwarevif.get_vif_info(self._session, [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] for vif in network_info: [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] return self._sync_wrapper(fn, *args, **kwargs) [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self.wait() [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self[:] = self._gt.wait() [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] return self._exit_event.wait() [ 838.059399] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 838.059904] env[61273]: ERROR 
nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] result = hub.switch() [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] return self.greenlet.switch() [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] result = function(*args, **kwargs) [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] return func(*args, **kwargs) [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] raise e [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] nwinfo = self.network_api.allocate_for_instance( [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 838.059904] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] created_port_ids = self._update_ports_for_instance( [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] with excutils.save_and_reraise_exception(): [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self.force_reraise() [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] raise self.value [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] updated_port = self._update_port( [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.060355] 
env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] _ensure_no_port_binding_failure(port) [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 838.060355] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] raise exception.PortBindingFailed(port_id=port['id']) [ 838.060699] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] nova.exception.PortBindingFailed: Binding failed for port 0f8ef003-3fad-4161-b909-098c989850aa, please check neutron logs for more information. [ 838.060699] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] [ 838.060699] env[61273]: INFO nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Terminating instance [ 838.060934] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "refresh_cache-9debd209-244f-472a-b9d6-cf63bba98839" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.180255] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.180785] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 838.182526] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg e259de9291fd44a8bb9f9a454ed9b9c2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 838.183631] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 838.185444] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg c2ea675eafa549e1a6cfa23aa7f46192 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 838.186009] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.601s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.187341] env[61273]: INFO nova.compute.claims [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.188812] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 9e9caa34ce6e47ffa33ad5e91253189a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 838.233276] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2ea675eafa549e1a6cfa23aa7f46192 [ 838.267037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e9caa34ce6e47ffa33ad5e91253189a [ 838.267037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e259de9291fd44a8bb9f9a454ed9b9c2 [ 838.330612] env[61273]: DEBUG nova.network.neutron [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.448925] env[61273]: DEBUG nova.network.neutron [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.449440] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] Expecting reply to msg f24890158ac94ae58068276cc2995208 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 838.463895] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f24890158ac94ae58068276cc2995208 [ 838.704184] env[61273]: DEBUG nova.compute.utils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 838.704184] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg a8ff626e98f24196bde2b8ed3065a769 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 838.704184] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 4891a5b727d74977b248e1ca5535cb56 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 838.704184] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 838.704184] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 838.705661] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8ff626e98f24196bde2b8ed3065a769 [ 838.706413] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4891a5b727d74977b248e1ca5535cb56 [ 838.713932] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.835080] env[61273]: DEBUG nova.policy [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a8248232ec34833b3e19f7dfd5d63f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f39358f90b3f468bbf87a5719c76273d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 838.957402] env[61273]: DEBUG oslo_concurrency.lockutils [req-f6f409fe-1f82-4207-9db9-f234df3cf960 req-db19a90a-4f6f-4ed1-bed3-38d7078a2b7a service nova] Releasing lock "refresh_cache-9debd209-244f-472a-b9d6-cf63bba98839" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.957828] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquired lock "refresh_cache-9debd209-244f-472a-b9d6-cf63bba98839" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.958009] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 838.958536] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 74d72456d12e4e1990751360a2076059 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 838.965078] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74d72456d12e4e1990751360a2076059 [ 838.983146] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e 
tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Acquiring lock "d3dafd33-91f8-481d-8f40-8c2e98a7587d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.983373] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Lock "d3dafd33-91f8-481d-8f40-8c2e98a7587d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.161061] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Successfully created port: ec4e0a91-b568-46e5-b1f8-d48f15b87d5c {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.200882] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 839.203071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 41de4b0aa8cb47239906b2d929c699ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 839.248942] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41de4b0aa8cb47239906b2d929c699ce [ 839.473426] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc6e314-3294-4ca1-a134-bee0f37270b7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.481509] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ebfb25-01d0-4c03-a5f5-b8135de2a88c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.522544] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30b2275-d09d-4096-bfc0-54a218d304c5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.527551] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.533215] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4db8a3-1d3a-4038-a875-de2460459b3d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.548808] env[61273]: DEBUG nova.compute.provider_tree [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.549311] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg f17f035567c344f9884a36f8bdcc2c0d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 839.556328] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f17f035567c344f9884a36f8bdcc2c0d [ 839.710687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg ee8e78db7c3942e2bc76282ca3d038db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 839.762050] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee8e78db7c3942e2bc76282ca3d038db [ 839.820530] env[61273]: DEBUG nova.compute.manager [req-7bac106f-c9e0-4674-adbf-0ea69ff79363 req-fc034aa9-fdad-42b2-846b-53be3a913b20 service nova] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Received event network-vif-deleted-0f8ef003-3fad-4161-b909-098c989850aa {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 839.896691] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.897184] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg ff343d2ec35141a3967c719250fa83c0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 839.905522] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff343d2ec35141a3967c719250fa83c0 [ 840.051618] env[61273]: DEBUG nova.scheduler.client.report [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 840.054352] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg e6c23f5cc2084eec91ecec6be7deb57b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 840.069838] env[61273]: ERROR nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c, please check neutron logs for more information. [ 840.069838] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 840.069838] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 840.069838] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 840.069838] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 840.069838] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 840.069838] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 840.069838] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 840.069838] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.069838] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 840.069838] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.069838] env[61273]: ERROR nova.compute.manager raise self.value [ 840.069838] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 840.069838] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 840.069838] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.069838] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 840.070405] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 840.070405] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 840.070405] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c, please check neutron logs for more information. 
[ 840.070405] env[61273]: ERROR nova.compute.manager [ 840.070405] env[61273]: Traceback (most recent call last): [ 840.070405] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 840.070405] env[61273]: listener.cb(fileno) [ 840.070405] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 840.070405] env[61273]: result = function(*args, **kwargs) [ 840.070405] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 840.070405] env[61273]: return func(*args, **kwargs) [ 840.070405] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 840.070405] env[61273]: raise e [ 840.070405] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 840.070405] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 840.070405] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 840.070405] env[61273]: created_port_ids = self._update_ports_for_instance( [ 840.070405] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 840.070405] env[61273]: with excutils.save_and_reraise_exception(): [ 840.070405] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.070405] env[61273]: self.force_reraise() [ 840.070405] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.070405] env[61273]: raise self.value [ 840.070405] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 840.070405] env[61273]: updated_port = self._update_port( [ 840.070405] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.070405] env[61273]: _ensure_no_port_binding_failure(port) [ 840.070405] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 840.070405] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 840.071201] env[61273]: nova.exception.PortBindingFailed: Binding failed for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c, please check neutron logs for more information. [ 840.071201] env[61273]: Removing descriptor: 15 [ 840.078350] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6c23f5cc2084eec91ecec6be7deb57b [ 840.213787] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 840.243753] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.243995] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.244181] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.244420] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.244525] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.244668] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.244894] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 840.245135] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.246305] env[61273]: DEBUG 
nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.246305] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 840.246305] env[61273]: DEBUG nova.virt.hardware [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.247080] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2570f3-a595-422f-8db4-115acfac6186 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.259120] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6f470e-51ce-4bac-b583-42c692bc0069 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.275355] env[61273]: ERROR nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c, please check neutron logs for more information. 
[ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Traceback (most recent call last): [ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] yield resources [ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self.driver.spawn(context, instance, image_meta, [ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self._vmops.spawn(context, instance, image_meta, injected_files, [ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] vm_ref = self.build_virtual_machine(instance, [ 840.275355] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] vif_infos = vmwarevif.get_vif_info(self._session, [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] for vif in network_info: [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] return self._sync_wrapper(fn, *args, **kwargs) [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self.wait() [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self[:] = self._gt.wait() [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] return self._exit_event.wait() [ 840.275756] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 840.275756] env[61273]: ERROR 
nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] current.throw(*self._exc) [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] result = function(*args, **kwargs) [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] return func(*args, **kwargs) [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] raise e [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] nwinfo = self.network_api.allocate_for_instance( [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] created_port_ids = self._update_ports_for_instance( [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] with excutils.save_and_reraise_exception(): [ 840.276174] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self.force_reraise() [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] raise self.value [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] updated_port = self._update_port( [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] _ensure_no_port_binding_failure(port) [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] raise exception.PortBindingFailed(port_id=port['id']) [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] nova.exception.PortBindingFailed: Binding failed for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c, please check neutron logs for more information. [ 840.276548] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] [ 840.276548] env[61273]: INFO nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Terminating instance [ 840.277698] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "refresh_cache-faaabf1e-74af-4cfa-ba1c-e2c2fabad124" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.277859] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquired lock "refresh_cache-faaabf1e-74af-4cfa-ba1c-e2c2fabad124" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.278024] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 840.278432] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg dfdeb47947174778bad9d2e9b44395c0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 840.284592] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfdeb47947174778bad9d2e9b44395c0 [ 840.398972] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Releasing lock "refresh_cache-9debd209-244f-472a-b9d6-cf63bba98839" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.399405] env[61273]: DEBUG nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 840.399677] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 840.400081] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-508daea7-5764-4f6f-9d13-c2d5f88c9aff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.409568] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1b7b71-471b-43f1-a5ad-87fe57aaeedc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.432296] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9debd209-244f-472a-b9d6-cf63bba98839 could not be found. [ 840.432521] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 840.432722] env[61273]: INFO nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Took 0.03 seconds to destroy the instance on the hypervisor. [ 840.432984] env[61273]: DEBUG oslo.service.loopingcall [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.433218] env[61273]: DEBUG nova.compute.manager [-] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 840.433291] env[61273]: DEBUG nova.network.neutron [-] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 840.449631] env[61273]: DEBUG nova.network.neutron [-] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.450431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c624c764618f49aa8e66b24bc2f1754e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 840.457131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c624c764618f49aa8e66b24bc2f1754e [ 840.521956] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 08748a136d784ff0ae5c2a3f8bfc4831 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 840.541171] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08748a136d784ff0ae5c2a3f8bfc4831 [ 840.557537] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.558061] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 840.559729] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 161d4cc163ef4425977322cf1a20caca in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 840.560771] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.046s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.562140] env[61273]: INFO nova.compute.claims [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 840.563621] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 5930dd5a5df84755a7697c629c97f172 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 840.598329] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 161d4cc163ef4425977322cf1a20caca [ 840.601168] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5930dd5a5df84755a7697c629c97f172 [ 840.804639] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.925980] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.926518] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg addcd4d6506f4b1d859e81e041461270 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 840.935093] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg addcd4d6506f4b1d859e81e041461270 [ 840.952245] env[61273]: DEBUG nova.network.neutron [-] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.952714] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 23711b16d9f14ee6a7010a40a19fa89f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 840.962900] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23711b16d9f14ee6a7010a40a19fa89f [ 840.998884] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.999388] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.066321] env[61273]: DEBUG nova.compute.utils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 841.067289] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 98265b1c1b384bdd8d44daac9b360739 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 841.074130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 6941581abcb948cea54874541153afb6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 841.075444] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 
tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 841.075739] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 841.085919] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98265b1c1b384bdd8d44daac9b360739 [ 841.086028] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6941581abcb948cea54874541153afb6 [ 841.120171] env[61273]: DEBUG nova.policy [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a8248232ec34833b3e19f7dfd5d63f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f39358f90b3f468bbf87a5719c76273d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 841.429022] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Releasing lock "refresh_cache-faaabf1e-74af-4cfa-ba1c-e2c2fabad124" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.429496] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 841.429720] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 841.430036] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b2b7ea7-48e1-4512-8369-60df1683a220 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.444604] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a8c3c2-5f4f-4bc0-aa0f-bc82a2f624fb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.455994] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Successfully created port: 5eb78d19-b6bf-451e-880f-0ae3168b6fd2 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.458534] env[61273]: INFO nova.compute.manager [-] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Took 1.02 seconds to deallocate network for instance. [ 841.461043] env[61273]: DEBUG nova.compute.claims [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 841.461222] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.470744] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance faaabf1e-74af-4cfa-ba1c-e2c2fabad124 could not be found. [ 841.471018] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 841.471426] env[61273]: INFO nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Took 0.04 seconds to destroy the instance on the hypervisor. 
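Note on the destroy sequence above: the SearchIndex.FindAllByUuid lookup finds no VM, the driver logs the "Instance does not exist on backend" warning, and then immediately reports "Instance destroyed" so that cleanup (network deallocation, claim abort) still runs. The following Python sketch only illustrates that tolerate-missing-backend pattern under simplified, hypothetical names (InstanceNotFound, find_vm_by_uuid, deallocate_network); it is not the actual nova.virt.vmwareapi implementation.

    # Illustrative sketch of the "treat a missing backend VM as already
    # destroyed" pattern visible in the log above. All names here are
    # simplified stand-ins, not real Nova internals.
    import logging

    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        """Raised when the hypervisor has no VM for the given instance UUID."""


    def find_vm_by_uuid(session, instance_uuid):
        # Placeholder for a vCenter SearchIndex.FindAllByUuid lookup.
        raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")


    def deallocate_network(instance_uuid):
        LOG.debug("Deallocating network for instance %s", instance_uuid)


    def destroy_instance(session, instance_uuid):
        """Destroy the VM if it exists; a missing VM counts as destroyed."""
        try:
            vm_ref = find_vm_by_uuid(session, instance_uuid)
            # ... power off and unregister vm_ref here ...
        except InstanceNotFound:
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        LOG.debug("Instance destroyed")


    def shutdown_instance(session, instance_uuid):
        # Cleanup continues whether or not the backend VM was present,
        # mirroring the log: destroy -> deallocate network -> abort claim.
        destroy_instance(session, instance_uuid)
        deallocate_network(instance_uuid)

The point of this shape is that a half-built instance (here, one whose port binding failed before a VM was ever created) can still be torn down cleanly: the destroy step degrades to a warning and the rest of the cleanup chain proceeds, which is exactly what the subsequent "Deallocating network for instance" and "Aborting claim" entries show.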
[ 841.471722] env[61273]: DEBUG oslo.service.loopingcall [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.471963] env[61273]: DEBUG nova.compute.manager [-] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 841.472090] env[61273]: DEBUG nova.network.neutron [-] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 841.498091] env[61273]: DEBUG nova.network.neutron [-] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 841.498580] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 25cc9477d77a43c689082b5b1365eefb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 841.505282] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25cc9477d77a43c689082b5b1365eefb [ 841.576678] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 841.578567] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg edf95e942bbf4f5fae976f48b5b27778 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 841.619745] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edf95e942bbf4f5fae976f48b5b27778 [ 841.827030] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ab6cd7-1425-409c-a36b-7cadcf78dd40 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.834716] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1e70b6-529a-4803-9d46-1a25a94eb6a8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.866445] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4469db-45b6-4d9d-b301-e43efac15e6c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.869833] env[61273]: DEBUG nova.compute.manager [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Received event network-changed-ec4e0a91-b568-46e5-b1f8-d48f15b87d5c {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 841.870017] env[61273]: DEBUG nova.compute.manager [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service 
nova] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Refreshing instance network info cache due to event network-changed-ec4e0a91-b568-46e5-b1f8-d48f15b87d5c. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 841.870229] env[61273]: DEBUG oslo_concurrency.lockutils [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] Acquiring lock "refresh_cache-faaabf1e-74af-4cfa-ba1c-e2c2fabad124" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.870370] env[61273]: DEBUG oslo_concurrency.lockutils [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] Acquired lock "refresh_cache-faaabf1e-74af-4cfa-ba1c-e2c2fabad124" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.870529] env[61273]: DEBUG nova.network.neutron [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Refreshing network info cache for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.870941] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] Expecting reply to msg d055aabb6a64456d8fdb3793b1ce9f76 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 841.880176] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4169610-0109-47f1-80be-ee4d01b621aa {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.884119] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d055aabb6a64456d8fdb3793b1ce9f76 [ 841.896323] env[61273]: DEBUG nova.compute.provider_tree [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.896813] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 66acc39021ea4887b856d1472101e47b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 841.903694] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66acc39021ea4887b856d1472101e47b [ 842.000381] env[61273]: DEBUG nova.network.neutron [-] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.000894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0c89fee6f9544945beaec1b0d3d5d1a3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 842.010318] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c89fee6f9544945beaec1b0d3d5d1a3 [ 842.085016] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 
tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg e9b5466d19d247ddb9999fbe31871f23 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 842.135442] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9b5466d19d247ddb9999fbe31871f23 [ 842.285306] env[61273]: ERROR nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2, please check neutron logs for more information. [ 842.285306] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 842.285306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 842.285306] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 842.285306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 842.285306] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 842.285306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 842.285306] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 842.285306] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.285306] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 842.285306] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.285306] env[61273]: ERROR nova.compute.manager raise self.value [ 842.285306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 842.285306] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 842.285306] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.285306] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 842.285809] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 842.285809] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 842.285809] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2, please check neutron logs for more information. 
[ 842.285809] env[61273]: ERROR nova.compute.manager [ 842.285809] env[61273]: Traceback (most recent call last): [ 842.285809] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 842.285809] env[61273]: listener.cb(fileno) [ 842.285809] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 842.285809] env[61273]: result = function(*args, **kwargs) [ 842.285809] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 842.285809] env[61273]: return func(*args, **kwargs) [ 842.285809] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 842.285809] env[61273]: raise e [ 842.285809] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 842.285809] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 842.285809] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 842.285809] env[61273]: created_port_ids = self._update_ports_for_instance( [ 842.285809] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 842.285809] env[61273]: with excutils.save_and_reraise_exception(): [ 842.285809] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.285809] env[61273]: self.force_reraise() [ 842.285809] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.285809] env[61273]: raise self.value [ 842.285809] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 842.285809] env[61273]: updated_port = self._update_port( [ 842.285809] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.285809] env[61273]: _ensure_no_port_binding_failure(port) [ 842.285809] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 842.285809] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 842.286623] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2, please check neutron logs for more information. [ 842.286623] env[61273]: Removing descriptor: 15 [ 842.389534] env[61273]: DEBUG nova.network.neutron [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 842.398915] env[61273]: DEBUG nova.scheduler.client.report [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 842.401610] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg fdb1fcbdc5fd466db3839c163799fc14 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 842.412602] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdb1fcbdc5fd466db3839c163799fc14 [ 842.467741] env[61273]: DEBUG nova.network.neutron [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.468322] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] Expecting reply to msg f14ca7b89d9642d586b76b1fc53f1493 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 842.479549] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f14ca7b89d9642d586b76b1fc53f1493 [ 842.510491] env[61273]: INFO nova.compute.manager [-] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Took 1.04 seconds to deallocate network for instance. [ 842.512951] env[61273]: DEBUG nova.compute.claims [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 842.513279] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.588656] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 842.608685] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 842.608933] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 842.609093] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 842.609299] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 842.609448] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 842.609617] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 842.609913] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 842.610111] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 842.610237] env[61273]: DEBUG 
nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 842.610416] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 842.610557] env[61273]: DEBUG nova.virt.hardware [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 842.611736] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aad7022-5cbf-46f1-9224-2917818e876d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.619581] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ee8816-8de5-4e0b-924c-02c988fd0f38 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.633448] env[61273]: ERROR nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2, please check neutron logs for more information. 
[ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Traceback (most recent call last): [ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] yield resources [ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self.driver.spawn(context, instance, image_meta, [ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self._vmops.spawn(context, instance, image_meta, injected_files, [ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] vm_ref = self.build_virtual_machine(instance, [ 842.633448] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] vif_infos = vmwarevif.get_vif_info(self._session, [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] for vif in network_info: [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] return self._sync_wrapper(fn, *args, **kwargs) [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self.wait() [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self[:] = self._gt.wait() [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] return self._exit_event.wait() [ 842.633797] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 842.633797] env[61273]: ERROR 
nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] current.throw(*self._exc) [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] result = function(*args, **kwargs) [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] return func(*args, **kwargs) [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] raise e [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] nwinfo = self.network_api.allocate_for_instance( [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] created_port_ids = self._update_ports_for_instance( [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] with excutils.save_and_reraise_exception(): [ 842.634189] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self.force_reraise() [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] raise self.value [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] updated_port = self._update_port( [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] _ensure_no_port_binding_failure(port) [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] raise exception.PortBindingFailed(port_id=port['id']) [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] nova.exception.PortBindingFailed: Binding failed for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2, please check neutron logs for more information. [ 842.634567] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] [ 842.634567] env[61273]: INFO nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Terminating instance [ 842.635735] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "refresh_cache-98f63a99-f1b8-4420-978d-7b69c39a2692" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.635897] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquired lock "refresh_cache-98f63a99-f1b8-4420-978d-7b69c39a2692" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.636086] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 842.636499] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 977edabf70bf428eac8be188f3973413 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 842.642988] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 977edabf70bf428eac8be188f3973413 [ 842.904489] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.904983] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 842.906606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 10151b0393dd4229a24392b1b3337077 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 842.908019] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.638s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.909372] env[61273]: INFO nova.compute.claims [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.910820] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg b06b8ed58078477ea9e16afb84e2604c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 842.940391] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10151b0393dd4229a24392b1b3337077 [ 842.948986] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b06b8ed58078477ea9e16afb84e2604c [ 842.971257] env[61273]: DEBUG oslo_concurrency.lockutils [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] Releasing lock "refresh_cache-faaabf1e-74af-4cfa-ba1c-e2c2fabad124" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.971497] env[61273]: DEBUG nova.compute.manager [req-38929426-0175-4599-9b7d-403fb5653fed req-865fd2fc-fa6e-4771-be6a-d388c107ebf3 service nova] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Received event network-vif-deleted-ec4e0a91-b568-46e5-b1f8-d48f15b87d5c {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 843.153055] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 843.233991] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.234507] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg cbd84d44ed614c208b73ffaf1c32acf5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 843.242587] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbd84d44ed614c208b73ffaf1c32acf5 [ 843.414221] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg e871a83b817444cb905d0de9a4892657 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 843.416039] env[61273]: DEBUG nova.compute.utils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 843.416630] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 93b4ff7a972f40d7837438ff878fe6db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 843.417597] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 843.417769] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 843.423254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e871a83b817444cb905d0de9a4892657 [ 843.432167] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93b4ff7a972f40d7837438ff878fe6db [ 843.455492] env[61273]: DEBUG nova.policy [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fe5dcf103624f20b3e75d7f2bbd54b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f921325e5f6b410fa0137369ca3a9c89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 843.706894] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Successfully created port: d806ad66-e122-4622-8f56-bfe27013f645 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 843.736653] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Releasing lock "refresh_cache-98f63a99-f1b8-4420-978d-7b69c39a2692" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.737071] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 843.737259] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 843.737558] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-568c1fcf-1dee-46e7-b54d-af0b91b3d11d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.748065] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2264db-be3b-41d2-89b7-11feaed34f8f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.768761] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 98f63a99-f1b8-4420-978d-7b69c39a2692 could not be found. [ 843.769069] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 843.769270] env[61273]: INFO nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Took 0.03 seconds to destroy the instance on the hypervisor. [ 843.769501] env[61273]: DEBUG oslo.service.loopingcall [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.769727] env[61273]: DEBUG nova.compute.manager [-] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 843.769821] env[61273]: DEBUG nova.network.neutron [-] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 843.783224] env[61273]: DEBUG nova.network.neutron [-] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 843.783679] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 13225093422344f6902ea1ea4510ce7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 843.793484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13225093422344f6902ea1ea4510ce7a [ 843.883505] env[61273]: DEBUG nova.compute.manager [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Received event network-changed-5eb78d19-b6bf-451e-880f-0ae3168b6fd2 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 843.883754] env[61273]: DEBUG nova.compute.manager [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Refreshing instance network info cache due to event network-changed-5eb78d19-b6bf-451e-880f-0ae3168b6fd2. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 843.884082] env[61273]: DEBUG oslo_concurrency.lockutils [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] Acquiring lock "refresh_cache-98f63a99-f1b8-4420-978d-7b69c39a2692" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.884153] env[61273]: DEBUG oslo_concurrency.lockutils [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] Acquired lock "refresh_cache-98f63a99-f1b8-4420-978d-7b69c39a2692" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.884315] env[61273]: DEBUG nova.network.neutron [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Refreshing network info cache for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 843.884747] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] Expecting reply to msg 9dc800991dc84d03b3949e8d0752e514 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 843.891515] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dc800991dc84d03b3949e8d0752e514 [ 843.922664] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 843.924202] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg f660be037f6c4f69a8627865813471ed in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 843.971188] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f660be037f6c4f69a8627865813471ed [ 844.173787] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8242d7-a350-4ba6-b010-8349db6e0706 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.182702] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda5fc53-2d78-4e3b-a4f7-2ddd4807c5ee {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.221074] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a7b376-c9bc-47b3-a98b-0205c8b3a0b3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.229009] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73a1a27-fb1d-4410-bdcb-9eb3fcc00cbf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.245273] env[61273]: DEBUG nova.compute.provider_tree [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.245804] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 3e6ee2fd6e0145fd8fac98027f5d77e2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 844.252729] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e6ee2fd6e0145fd8fac98027f5d77e2 [ 844.289960] env[61273]: DEBUG nova.network.neutron [-] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.290641] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bfbd5e12045947b4bd8b568283e86dcf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 844.299224] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfbd5e12045947b4bd8b568283e86dcf [ 844.402329] env[61273]: DEBUG nova.network.neutron [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 844.438173] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 25602a0f775d4b9289564a1e60bd0fd9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 844.470998] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25602a0f775d4b9289564a1e60bd0fd9 [ 844.472402] env[61273]: DEBUG nova.network.neutron [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.472914] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] Expecting reply to msg e524a913bef24e6abcb27a360724dcdb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 844.480443] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e524a913bef24e6abcb27a360724dcdb [ 844.514328] env[61273]: ERROR nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d806ad66-e122-4622-8f56-bfe27013f645, please check neutron logs for more information. [ 844.514328] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 844.514328] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 844.514328] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 844.514328] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 844.514328] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 844.514328] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 844.514328] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 844.514328] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 844.514328] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 844.514328] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 844.514328] env[61273]: ERROR nova.compute.manager raise self.value [ 844.514328] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 844.514328] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 844.514328] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 844.514328] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 844.514812] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 844.514812] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 844.514812] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d806ad66-e122-4622-8f56-bfe27013f645, please check neutron logs for more information. [ 844.514812] env[61273]: ERROR nova.compute.manager [ 844.514812] env[61273]: Traceback (most recent call last): [ 844.514812] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 844.514812] env[61273]: listener.cb(fileno) [ 844.514812] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 844.514812] env[61273]: result = function(*args, **kwargs) [ 844.514812] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 844.514812] env[61273]: return func(*args, **kwargs) [ 844.514812] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 844.514812] env[61273]: raise e [ 844.514812] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 844.514812] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 844.514812] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 844.514812] env[61273]: created_port_ids = self._update_ports_for_instance( [ 844.514812] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 844.514812] env[61273]: with excutils.save_and_reraise_exception(): [ 844.514812] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 844.514812] env[61273]: self.force_reraise() [ 844.514812] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 844.514812] env[61273]: raise self.value [ 844.514812] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 844.514812] env[61273]: updated_port = self._update_port( [ 844.514812] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 844.514812] env[61273]: _ensure_no_port_binding_failure(port) [ 844.514812] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 844.514812] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 844.515700] env[61273]: nova.exception.PortBindingFailed: Binding failed for port d806ad66-e122-4622-8f56-bfe27013f645, please check neutron logs for more information. 
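
The traceback above bottoms out in nova/network/neutron.py:294, where a port whose binding Neutron could not complete is turned into PortBindingFailed, and the frames through oslo_utils.excutils show the save-and-reraise cleanup pattern around it. Below is a minimal sketch of that path, reconstructed from the frames in the log rather than copied from the Nova source; the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions based on Neutron's port-binding API.

# Illustrative reconstruction of the failure path shown in the traceback above.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Assumption: Neutron marks a port it could not bind with
    # binding:vif_type = 'binding_failed'; the check raises on that marker.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def _update_ports_for_instance(ports):
    created_port_ids = []
    for port in ports:
        try:
            _ensure_no_port_binding_failure(port)
            created_port_ids.append(port['id'])
        except Exception:
            # save_and_reraise_exception runs the cleanup in its body and then
            # re-raises the original error, which is why force_reraise() and
            # "raise self.value" appear as frames in the traceback above.
            with excutils.save_and_reraise_exception():
                pass  # roll back any ports already created for the instance
    return created_port_ids


# Example with the failed port from the log:
#   _update_ports_for_instance(
#       [{'id': 'd806ad66-e122-4622-8f56-bfe27013f645',
#         'binding:vif_type': 'binding_failed'}])
#   -> PortBindingFailed: Binding failed for port d806ad66-..., please check
#      neutron logs for more information.
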
[ 844.515700] env[61273]: Removing descriptor: 15 [ 844.748745] env[61273]: DEBUG nova.scheduler.client.report [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.751291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 046b029285bd4105a60ee05b7eea3415 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 844.763102] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 046b029285bd4105a60ee05b7eea3415 [ 844.793365] env[61273]: INFO nova.compute.manager [-] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Took 1.02 seconds to deallocate network for instance. [ 844.797314] env[61273]: DEBUG nova.compute.claims [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 844.797582] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.943052] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 844.969132] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 844.969407] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 844.969597] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.969858] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 844.970006] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.970149] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 844.970350] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 844.970504] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 844.970663] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 844.970819] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 844.971049] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 844.972172] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a2e879-5bc3-4a81-9cbc-62c9e553af96 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.979853] env[61273]: DEBUG oslo_concurrency.lockutils [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] Releasing lock "refresh_cache-98f63a99-f1b8-4420-978d-7b69c39a2692" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.980102] env[61273]: DEBUG nova.compute.manager [req-84a0858b-42fa-423a-8f54-f9a7d28f054d req-685ae302-bada-4481-bab4-a7904b3d5fd8 service nova] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Received event network-vif-deleted-5eb78d19-b6bf-451e-880f-0ae3168b6fd2 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 844.981501] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aedd982-970d-4212-a39f-a9538f5cc1f9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.995212] env[61273]: ERROR nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d806ad66-e122-4622-8f56-bfe27013f645, please check neutron logs for more information. 
[ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Traceback (most recent call last): [ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] yield resources [ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self.driver.spawn(context, instance, image_meta, [ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self._vmops.spawn(context, instance, image_meta, injected_files, [ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] vm_ref = self.build_virtual_machine(instance, [ 844.995212] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] vif_infos = vmwarevif.get_vif_info(self._session, [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] for vif in network_info: [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] return self._sync_wrapper(fn, *args, **kwargs) [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self.wait() [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self[:] = self._gt.wait() [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] return self._exit_event.wait() [ 844.995576] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 844.995576] env[61273]: ERROR 
nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] current.throw(*self._exc) [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] result = function(*args, **kwargs) [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] return func(*args, **kwargs) [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] raise e [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] nwinfo = self.network_api.allocate_for_instance( [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] created_port_ids = self._update_ports_for_instance( [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] with excutils.save_and_reraise_exception(): [ 844.995919] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self.force_reraise() [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] raise self.value [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] updated_port = self._update_port( [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] _ensure_no_port_binding_failure(port) [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] raise exception.PortBindingFailed(port_id=port['id']) [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] nova.exception.PortBindingFailed: Binding failed for port d806ad66-e122-4622-8f56-bfe27013f645, please check neutron logs for more information. [ 844.996312] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] [ 844.996312] env[61273]: INFO nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Terminating instance [ 844.997558] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "refresh_cache-05df2575-9c3f-43d4-8fe4-52a808e11080" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.997718] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquired lock "refresh_cache-05df2575-9c3f-43d4-8fe4-52a808e11080" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.997886] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 844.998292] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg b04c40e844a94710a25d6a6a55b8d798 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 845.004816] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b04c40e844a94710a25d6a6a55b8d798 [ 845.258611] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.259145] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 845.260810] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 774cf91ff1f84116abe61f13dde51705 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 845.261933] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.169s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.273908] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg d6e1659dc8c4449e899639b4686c5e92 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 845.300447] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 774cf91ff1f84116abe61f13dde51705 [ 845.306092] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6e1659dc8c4449e899639b4686c5e92 [ 845.515325] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 845.597822] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.598377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 60eb643b8b1a46958c4f69e01680f29c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 845.607062] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60eb643b8b1a46958c4f69e01680f29c [ 845.779877] env[61273]: DEBUG nova.compute.utils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.780608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg a1c73603cee84b81b6a64feadfefd9e9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 845.787602] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 
ae9866e2-544a-4d26-b198-87110f42f054] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 845.789068] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 845.798795] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1c73603cee84b81b6a64feadfefd9e9 [ 845.828402] env[61273]: DEBUG nova.policy [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fe5dcf103624f20b3e75d7f2bbd54b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f921325e5f6b410fa0137369ca3a9c89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 845.925518] env[61273]: DEBUG nova.compute.manager [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Received event network-changed-d806ad66-e122-4622-8f56-bfe27013f645 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 845.925763] env[61273]: DEBUG nova.compute.manager [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Refreshing instance network info cache due to event network-changed-d806ad66-e122-4622-8f56-bfe27013f645. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 845.925913] env[61273]: DEBUG oslo_concurrency.lockutils [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] Acquiring lock "refresh_cache-05df2575-9c3f-43d4-8fe4-52a808e11080" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.074851] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f18fd37-668d-4380-af22-0c13bb007e0b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.083975] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cb1435-ab27-44f1-84d7-3a9e09d38d58 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.121524] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Releasing lock "refresh_cache-05df2575-9c3f-43d4-8fe4-52a808e11080" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.122100] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 846.122318] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 846.122834] env[61273]: DEBUG oslo_concurrency.lockutils [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] Acquired lock "refresh_cache-05df2575-9c3f-43d4-8fe4-52a808e11080" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.123011] env[61273]: DEBUG nova.network.neutron [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Refreshing network info cache for port d806ad66-e122-4622-8f56-bfe27013f645 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 846.123470] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] Expecting reply to msg e3ffea34a75c43088629123a593c3c6c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 846.124251] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8afc7cf0-b41b-453e-a3c3-5deb34e9fd31 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.138894] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655e1221-5843-4251-8526-5b6285078a9b {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.142546] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Successfully created port: fdce2ef1-b287-49fc-9741-089b76b0ab0f {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.144565] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3ffea34a75c43088629123a593c3c6c [ 846.150875] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32369b74-644a-4b17-8467-fa6b2a11cc6e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.158583] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0658bf30-59cb-4dfd-a692-8aaad407f052 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.182579] env[61273]: DEBUG nova.compute.provider_tree [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.183108] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 43d1c4e700dc4c34a2709df72cd7cd2e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 846.188255] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 05df2575-9c3f-43d4-8fe4-52a808e11080 could not be found. [ 846.188456] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 846.188962] env[61273]: INFO nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Took 0.07 seconds to destroy the instance on the hypervisor. [ 846.189211] env[61273]: DEBUG oslo.service.loopingcall [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 846.189947] env[61273]: DEBUG nova.compute.manager [-] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 846.190051] env[61273]: DEBUG nova.network.neutron [-] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 846.192299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43d1c4e700dc4c34a2709df72cd7cd2e [ 846.215554] env[61273]: DEBUG nova.network.neutron [-] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.216074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a73a7061afa84eedb62d74023f51aef1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 846.228173] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a73a7061afa84eedb62d74023f51aef1 [ 846.288904] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 846.290973] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 9e83eaa2e0664c45a4afc53cbe2c31e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 846.323575] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e83eaa2e0664c45a4afc53cbe2c31e3 [ 846.660795] env[61273]: DEBUG nova.network.neutron [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.690937] env[61273]: DEBUG nova.scheduler.client.report [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.693453] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg ef9c5ef6cb01414984665a753b0b9bb8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 846.705039] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef9c5ef6cb01414984665a753b0b9bb8 [ 846.718201] env[61273]: DEBUG nova.network.neutron [-] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.718653] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dd6a9ed906a549cf99e9195645a8c9f1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 846.727059] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd6a9ed906a549cf99e9195645a8c9f1 [ 846.756148] env[61273]: DEBUG nova.network.neutron [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.756611] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] Expecting reply to msg 278ae53ca8a44f749bcb48f9e88227c4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 846.770061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 278ae53ca8a44f749bcb48f9e88227c4 [ 846.797144] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg bfad2ae62b2449b0af75981cd02b19ed in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 846.853938] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfad2ae62b2449b0af75981cd02b19ed [ 847.046576] env[61273]: ERROR nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fdce2ef1-b287-49fc-9741-089b76b0ab0f, please check neutron logs for more information. 
[ 847.046576] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 847.046576] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 847.046576] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 847.046576] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 847.046576] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 847.046576] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 847.046576] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 847.046576] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 847.046576] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 847.046576] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 847.046576] env[61273]: ERROR nova.compute.manager raise self.value [ 847.046576] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 847.046576] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 847.046576] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 847.046576] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 847.047098] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 847.047098] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 847.047098] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fdce2ef1-b287-49fc-9741-089b76b0ab0f, please check neutron logs for more information. 
[ 847.047098] env[61273]: ERROR nova.compute.manager [ 847.047098] env[61273]: Traceback (most recent call last): [ 847.047098] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 847.047098] env[61273]: listener.cb(fileno) [ 847.047098] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 847.047098] env[61273]: result = function(*args, **kwargs) [ 847.047098] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 847.047098] env[61273]: return func(*args, **kwargs) [ 847.047098] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 847.047098] env[61273]: raise e [ 847.047098] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 847.047098] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 847.047098] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 847.047098] env[61273]: created_port_ids = self._update_ports_for_instance( [ 847.047098] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 847.047098] env[61273]: with excutils.save_and_reraise_exception(): [ 847.047098] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 847.047098] env[61273]: self.force_reraise() [ 847.047098] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 847.047098] env[61273]: raise self.value [ 847.047098] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 847.047098] env[61273]: updated_port = self._update_port( [ 847.047098] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 847.047098] env[61273]: _ensure_no_port_binding_failure(port) [ 847.047098] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 847.047098] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 847.047918] env[61273]: nova.exception.PortBindingFailed: Binding failed for port fdce2ef1-b287-49fc-9741-089b76b0ab0f, please check neutron logs for more information. [ 847.047918] env[61273]: Removing descriptor: 15 [ 847.196812] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.935s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.197472] env[61273]: ERROR nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9a1e113b-fad2-4524-a3b5-29c7375c9242, please check neutron logs for more information. 
[ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Traceback (most recent call last): [ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self.driver.spawn(context, instance, image_meta, [ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self._vmops.spawn(context, instance, image_meta, injected_files, [ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] vm_ref = self.build_virtual_machine(instance, [ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] vif_infos = vmwarevif.get_vif_info(self._session, [ 847.197472] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] for vif in network_info: [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] return self._sync_wrapper(fn, *args, **kwargs) [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self.wait() [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self[:] = self._gt.wait() [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] return self._exit_event.wait() [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] result = hub.switch() [ 847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
847.197840] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] return self.greenlet.switch() [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] result = function(*args, **kwargs) [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] return func(*args, **kwargs) [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] raise e [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] nwinfo = self.network_api.allocate_for_instance( [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] created_port_ids = self._update_ports_for_instance( [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] with excutils.save_and_reraise_exception(): [ 847.198213] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] self.force_reraise() [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] raise self.value [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] updated_port = self._update_port( [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] _ensure_no_port_binding_failure(port) [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] raise exception.PortBindingFailed(port_id=port['id']) [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] nova.exception.PortBindingFailed: Binding failed for port 9a1e113b-fad2-4524-a3b5-29c7375c9242, please check neutron logs for more information. [ 847.198611] env[61273]: ERROR nova.compute.manager [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] [ 847.198920] env[61273]: DEBUG nova.compute.utils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Binding failed for port 9a1e113b-fad2-4524-a3b5-29c7375c9242, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 847.199453] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.999s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.201338] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 85e27fef7cf14dd7b35161a9e5146f2a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 847.202537] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Build of instance fcdd594c-b89f-4d0b-a4d5-2644b3b62b56 was re-scheduled: Binding failed for port 9a1e113b-fad2-4524-a3b5-29c7375c9242, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 847.203047] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 847.203232] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "refresh_cache-fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.203378] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquired lock "refresh_cache-fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.203558] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 847.203885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg a64e982b2b2a40639bdf6d165d156271 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 847.210511] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a64e982b2b2a40639bdf6d165d156271 [ 847.220340] env[61273]: INFO nova.compute.manager [-] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Took 1.03 seconds to deallocate network for instance. 
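
The "compute_resources" lock traffic around these entries ("Acquiring lock ... by ...", "acquired ... waited 13.999s", "released ... held 1.935s") is the standard oslo.concurrency pattern that serializes resource-tracker claims and aborts on a host. Below is a minimal sketch of that pattern using the lock name from the log; the decorated function bodies are placeholders, not the actual ResourceTracker code.

# Illustrative sketch of the locking pattern behind the lockutils DEBUG lines.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Runs with the in-process "compute_resources" lock held, so concurrent
    # claims and aborts from other build requests queue behind it; the
    # "waited"/"held" timings in the log come from lockutils' own DEBUG output.
    pass


# Equivalent context-manager form:
def instance_claim(instance_uuid):
    with lockutils.lock('compute_resources'):
        pass  # claim bookkeeping happens while the lock is held
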
[ 847.222175] env[61273]: DEBUG nova.compute.claims [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 847.222342] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.235696] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85e27fef7cf14dd7b35161a9e5146f2a [ 847.258484] env[61273]: DEBUG oslo_concurrency.lockutils [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] Releasing lock "refresh_cache-05df2575-9c3f-43d4-8fe4-52a808e11080" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.258727] env[61273]: DEBUG nova.compute.manager [req-63d115c1-28fc-4c6f-a7c3-dbad1f46a634 req-4c9373f2-f39e-474a-904f-4d3e51bdfac8 service nova] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Received event network-vif-deleted-d806ad66-e122-4622-8f56-bfe27013f645 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 847.299457] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 847.324054] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 847.324324] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 847.324479] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.324659] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 847.324805] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.324951] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 847.325172] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 847.325340] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 847.325514] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 847.325672] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 847.325844] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 847.326752] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca61ab32-6bc2-47c7-a92b-047da10bbed1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.335447] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2221ab-55f8-49f0-838c-527461a35a8c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.350170] env[61273]: ERROR nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fdce2ef1-b287-49fc-9741-089b76b0ab0f, please check neutron logs for more information. 
[ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] Traceback (most recent call last): [ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] yield resources [ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self.driver.spawn(context, instance, image_meta, [ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self._vmops.spawn(context, instance, image_meta, injected_files, [ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] vm_ref = self.build_virtual_machine(instance, [ 847.350170] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] vif_infos = vmwarevif.get_vif_info(self._session, [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] for vif in network_info: [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] return self._sync_wrapper(fn, *args, **kwargs) [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self.wait() [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self[:] = self._gt.wait() [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] return self._exit_event.wait() [ 847.350571] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 847.350571] env[61273]: ERROR 
nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] current.throw(*self._exc) [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] result = function(*args, **kwargs) [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] return func(*args, **kwargs) [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] raise e [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] nwinfo = self.network_api.allocate_for_instance( [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] created_port_ids = self._update_ports_for_instance( [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] with excutils.save_and_reraise_exception(): [ 847.350906] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self.force_reraise() [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] raise self.value [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] updated_port = self._update_port( [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] _ensure_no_port_binding_failure(port) [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] raise exception.PortBindingFailed(port_id=port['id']) [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] nova.exception.PortBindingFailed: Binding failed for port fdce2ef1-b287-49fc-9741-089b76b0ab0f, please check neutron logs for more information. [ 847.351215] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] [ 847.351215] env[61273]: INFO nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Terminating instance [ 847.353021] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "refresh_cache-ae9866e2-544a-4d26-b198-87110f42f054" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.353181] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquired lock "refresh_cache-ae9866e2-544a-4d26-b198-87110f42f054" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.353346] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 847.353768] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 84b1b179db4041479906f321d3829edb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 847.360908] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84b1b179db4041479906f321d3829edb [ 847.729147] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.801794] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.802291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 144867d034ad423bb9c533a1af07e025 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 847.810760] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 144867d034ad423bb9c533a1af07e025 [ 847.869185] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.941224] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32ed756-9a68-4a9f-8b24-ccbbd887db43 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.949062] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da93a3e-4084-417e-89c3-19b33bd4d6a8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.954839] env[61273]: DEBUG nova.compute.manager [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Received event network-changed-fdce2ef1-b287-49fc-9741-089b76b0ab0f {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 847.955012] env[61273]: DEBUG nova.compute.manager [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Refreshing instance network info cache due to event network-changed-fdce2ef1-b287-49fc-9741-089b76b0ab0f. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 847.955207] env[61273]: DEBUG oslo_concurrency.lockutils [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] Acquiring lock "refresh_cache-ae9866e2-544a-4d26-b198-87110f42f054" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.986892] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.987492] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 3bb124ce5adf415c8a20978047238fef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 847.989594] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312603a2-d410-4eaf-bd74-e192512dc8ad {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.997337] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f64a7a-2031-4ba1-9635-73aaaf22df84 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.001578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3bb124ce5adf415c8a20978047238fef [ 848.011784] env[61273]: DEBUG nova.compute.provider_tree [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.012297] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg c333468e86f945e180a9943d53f9e628 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 848.019012] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c333468e86f945e180a9943d53f9e628 [ 848.304402] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Releasing lock "refresh_cache-fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.304658] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 848.304850] env[61273]: DEBUG nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 848.305016] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 848.319590] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.320160] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 524cda0f094d446e99aaed1f0a35c690 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 848.327564] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 524cda0f094d446e99aaed1f0a35c690 [ 848.490137] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Releasing lock "refresh_cache-ae9866e2-544a-4d26-b198-87110f42f054" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.490575] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 848.490802] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 848.491112] env[61273]: DEBUG oslo_concurrency.lockutils [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] Acquired lock "refresh_cache-ae9866e2-544a-4d26-b198-87110f42f054" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.491285] env[61273]: DEBUG nova.network.neutron [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Refreshing network info cache for port fdce2ef1-b287-49fc-9741-089b76b0ab0f {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 848.491831] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] Expecting reply to msg 3c994c2493ac46b7b31bae977079dff3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 848.492703] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27157bca-4183-4270-b63b-2e9fa3a551f0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.499161] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c994c2493ac46b7b31bae977079dff3 [ 848.502136] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e657f1d4-eff8-4279-9d44-3bfad1692ae7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.514275] env[61273]: DEBUG nova.scheduler.client.report [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.516528] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 14ffa1b0098247fbb27a29f4c9584d61 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 848.523527] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ae9866e2-544a-4d26-b198-87110f42f054 could not be found. 
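The inventory payload in the report-client line above is what drives scheduling capacity for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb: for each resource class, placement treats (total - reserved) * allocation_ratio as the schedulable amount, subject to the min_unit/max_unit/step_size constraints. A short sketch of that arithmetic with the exact values from the log; the usable() helper is made up for illustration and is not a Nova or placement API.

    # Usable capacity per resource class: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable(inv):
        # Illustrative helper only.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(usable(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}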
[ 848.523719] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 848.523904] env[61273]: INFO nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Took 0.03 seconds to destroy the instance on the hypervisor. [ 848.524165] env[61273]: DEBUG oslo.service.loopingcall [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.524410] env[61273]: DEBUG nova.compute.manager [-] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 848.524506] env[61273]: DEBUG nova.network.neutron [-] [instance: ae9866e2-544a-4d26-b198-87110f42f054] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 848.530203] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14ffa1b0098247fbb27a29f4c9584d61 [ 848.541421] env[61273]: DEBUG nova.network.neutron [-] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.541874] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3e30ed8cdb694252b7c1c0c4ce7f0487 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 848.547504] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e30ed8cdb694252b7c1c0c4ce7f0487 [ 848.822488] env[61273]: DEBUG nova.network.neutron [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.823734] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 9050518b68bd4ed2848327debee731fe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 848.841494] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9050518b68bd4ed2848327debee731fe [ 849.013316] env[61273]: DEBUG nova.network.neutron [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.018758] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.819s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.019526] env[61273]: ERROR nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6, please check neutron logs for more information. [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] Traceback (most recent call last): [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] self.driver.spawn(context, instance, image_meta, [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] self._vmops.spawn(context, instance, image_meta, injected_files, [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] vm_ref = self.build_virtual_machine(instance, [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] vif_infos = vmwarevif.get_vif_info(self._session, [ 849.019526] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] for vif in network_info: [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] return self._sync_wrapper(fn, *args, **kwargs) [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] self.wait() [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 
6494039f-3716-4174-92c0-15df384e0878] self[:] = self._gt.wait() [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] return self._exit_event.wait() [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] result = hub.switch() [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 849.019918] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] return self.greenlet.switch() [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] result = function(*args, **kwargs) [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] return func(*args, **kwargs) [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] raise e [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] nwinfo = self.network_api.allocate_for_instance( [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] created_port_ids = self._update_ports_for_instance( [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] with excutils.save_and_reraise_exception(): [ 849.020304] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] self.force_reraise() [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] raise self.value [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] updated_port = self._update_port( [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] _ensure_no_port_binding_failure(port) [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] raise exception.PortBindingFailed(port_id=port['id']) [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] nova.exception.PortBindingFailed: Binding failed for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6, please check neutron logs for more information. [ 849.020672] env[61273]: ERROR nova.compute.manager [instance: 6494039f-3716-4174-92c0-15df384e0878] [ 849.020990] env[61273]: DEBUG nova.compute.utils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Binding failed for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 849.022083] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.042s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.023560] env[61273]: INFO nova.compute.claims [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.025138] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 99944347598a46299190f11c0b91204b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 849.026532] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Build of instance 6494039f-3716-4174-92c0-15df384e0878 was re-scheduled: Binding failed for port 2c33529b-82e4-4376-a0ac-52e2aa5b95c6, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 849.026983] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 849.027210] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "refresh_cache-6494039f-3716-4174-92c0-15df384e0878" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.027358] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquired lock "refresh_cache-6494039f-3716-4174-92c0-15df384e0878" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.027519] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 849.027882] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 757932957fb543aa94385b9232922580 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 849.033921] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 757932957fb543aa94385b9232922580 [ 849.043780] env[61273]: DEBUG nova.network.neutron [-] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.045420] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ee037784c1614bd5a31dc7c2eaeaec87 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 849.066166] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee037784c1614bd5a31dc7c2eaeaec87 [ 849.070155] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99944347598a46299190f11c0b91204b [ 849.091148] env[61273]: DEBUG nova.network.neutron [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.091397] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] Expecting reply to msg d7476f5e0d7a492a94968bff917ac6c2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 849.099029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7476f5e0d7a492a94968bff917ac6c2 [ 849.335553] env[61273]: INFO nova.compute.manager [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 
tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: fcdd594c-b89f-4d0b-a4d5-2644b3b62b56] Took 1.03 seconds to deallocate network for instance. [ 849.337640] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 32cfe04b65f446dda9674134aee83443 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 849.373659] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32cfe04b65f446dda9674134aee83443 [ 849.531845] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 34e2019e27aa456f86ad2e80dd6c4479 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 849.538809] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34e2019e27aa456f86ad2e80dd6c4479 [ 849.546934] env[61273]: INFO nova.compute.manager [-] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Took 1.02 seconds to deallocate network for instance. [ 849.547918] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.551169] env[61273]: DEBUG nova.compute.claims [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 849.551345] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.593520] env[61273]: DEBUG oslo_concurrency.lockutils [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] Releasing lock "refresh_cache-ae9866e2-544a-4d26-b198-87110f42f054" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.593803] env[61273]: DEBUG nova.compute.manager [req-6c34b743-690d-4f67-a957-e475a951057f req-60b17a13-7fa2-45e1-97eb-f4b216deb485 service nova] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Received event network-vif-deleted-fdce2ef1-b287-49fc-9741-089b76b0ab0f {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 849.608119] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.608587] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 
tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 9d9b5289ce6740c2be6d98b195742378 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 849.616427] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d9b5289ce6740c2be6d98b195742378 [ 849.842855] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 5d35c2452c7747f4895d3eb1eac16a8d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 849.872064] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d35c2452c7747f4895d3eb1eac16a8d [ 850.110585] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Releasing lock "refresh_cache-6494039f-3716-4174-92c0-15df384e0878" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.111168] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 850.111168] env[61273]: DEBUG nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.111168] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 850.128621] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 850.129221] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 85250cc1badd4821bf23134e4c1cef8a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 850.137704] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85250cc1badd4821bf23134e4c1cef8a [ 850.257286] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea0d313-24f8-4bad-b533-b83f247735a2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.264677] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa4c2e2-6a9c-4a97-aac2-fd0099366e9c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.293291] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1816538-fcc7-4a17-907f-b9a0db3eb97c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.300055] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72c2aba-d1ef-49bc-8352-4cbde80e9980 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.313520] env[61273]: DEBUG nova.compute.provider_tree [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.313983] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg ead40899f75f4b198189b8fef834aea6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 850.321154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ead40899f75f4b198189b8fef834aea6 [ 850.362601] env[61273]: INFO nova.scheduler.client.report [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Deleted allocations for instance fcdd594c-b89f-4d0b-a4d5-2644b3b62b56 [ 850.368465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg ce0aac556789403f980663c8ba8bc55c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 850.382127] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce0aac556789403f980663c8ba8bc55c [ 850.631560] env[61273]: DEBUG nova.network.neutron [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 850.632174] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 52bc556cbdc2496fbf3f0ea3faadc153 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 850.640763] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52bc556cbdc2496fbf3f0ea3faadc153 [ 850.817108] env[61273]: DEBUG nova.scheduler.client.report [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.819867] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg a1c555678d114580a110b82e84a2532a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 850.830528] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1c555678d114580a110b82e84a2532a [ 850.870916] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c03e671b-fa07-4498-8ea1-9c9117d2577f tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "fcdd594c-b89f-4d0b-a4d5-2644b3b62b56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.935s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.871462] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg e0be39f924054e5c842e95efda1472fd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 850.882086] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0be39f924054e5c842e95efda1472fd [ 851.134211] env[61273]: INFO nova.compute.manager [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 6494039f-3716-4174-92c0-15df384e0878] Took 1.02 seconds to deallocate network for instance. 
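Both PortBindingFailed tracebacks in this section bottom out in nova/network/neutron.py's _ensure_no_port_binding_failure(), which turns a Neutron port whose binding came back failed into the exception that aborts the spawn, tears the instance down, and re-schedules the build. A simplified, self-contained sketch of that check, consistent with the frames shown above; the constant and exception class are stand-ins for nova.network.model and nova.exception rather than the real definitions.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'   # stand-in for nova.network.model

    class PortBindingFailed(Exception):          # stand-in for nova.exception.PortBindingFailed
        def __init__(self, port_id):
            super().__init__(
                f'Binding failed for port {port_id}, '
                'please check neutron logs for more information.')

    def ensure_no_port_binding_failure(port):
        # If Neutron reports the port's binding:vif_type as 'binding_failed',
        # abort the build, as seen at neutron.py:294 in the tracebacks above.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example (would raise PortBindingFailed):
    # ensure_no_port_binding_failure(
    #     {'id': 'fdce2ef1-b287-49fc-9741-089b76b0ab0f',
    #      'binding:vif_type': 'binding_failed'})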
[ 851.135924] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 7cfd97124ab14d9d9724b49725dcd114 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 851.168719] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cfd97124ab14d9d9724b49725dcd114 [ 851.322387] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.322961] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 851.324980] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 477a04f7c43045e293546c8a64eac78d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 851.325797] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.510s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.327169] env[61273]: INFO nova.compute.claims [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.328704] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg da19428056c14cf499f9b23de726b778 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 851.359510] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 477a04f7c43045e293546c8a64eac78d [ 851.363978] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da19428056c14cf499f9b23de726b778 [ 851.373068] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 851.374807] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 1dabf932226d4fe6bb9ba02c44c17dae in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 851.412392] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1dabf932226d4fe6bb9ba02c44c17dae [ 851.640541] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 4e9ad12a6ca144798b163015ffe7135c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 851.672686] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e9ad12a6ca144798b163015ffe7135c [ 851.832261] env[61273]: DEBUG nova.compute.utils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 851.832927] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 09c2e5a911704078bcad8c5aded24bf9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 851.838489] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg ee1850f4282e4d32863a88868b186ae4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 851.838489] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 851.838489] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 851.874040] env[61273]: DEBUG nova.policy [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fe5dcf103624f20b3e75d7f2bbd54b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f921325e5f6b410fa0137369ca3a9c89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 851.896909] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.897526] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee1850f4282e4d32863a88868b186ae4 [ 851.901756] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09c2e5a911704078bcad8c5aded24bf9 [ 852.163359] env[61273]: INFO nova.scheduler.client.report [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Deleted allocations for instance 6494039f-3716-4174-92c0-15df384e0878 [ 852.173781] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 9aa8f2a8f33c4563a2d3d85c0f74bfee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 852.183353] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9aa8f2a8f33c4563a2d3d85c0f74bfee [ 852.292248] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Successfully created port: f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.337538] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 852.339293] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 2e44677e13e14234a1fef84de274fe67 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 852.377243] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e44677e13e14234a1fef84de274fe67 [ 852.555183] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f153f6a-c040-4a83-b86c-d8833e8c70dd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.563102] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e313b4d-5934-4842-9005-f14afffaede4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.596946] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a9c229-d73e-4ad1-905d-2ec2a1dc1cae {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.605491] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2801e87-4a9c-4ec4-8ba3-bd1701ab03b3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.620055] env[61273]: DEBUG nova.compute.provider_tree [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.620692] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 660504f3d5da4997913dcfb11d2cd736 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 852.628667] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 660504f3d5da4997913dcfb11d2cd736 [ 852.671824] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ce441822-9883-41c5-873d-f7db8e167cc0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "6494039f-3716-4174-92c0-15df384e0878" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.947s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.672641] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 97b47a5dc69b474498d0d3a7552d179b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 852.682668] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97b47a5dc69b474498d0d3a7552d179b [ 852.846790] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 
tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 7115f062abd8441dbbad60981e8ed392 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 852.894966] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7115f062abd8441dbbad60981e8ed392 [ 853.104111] env[61273]: DEBUG nova.compute.manager [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Received event network-changed-f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 853.104596] env[61273]: DEBUG nova.compute.manager [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Refreshing instance network info cache due to event network-changed-f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 853.104596] env[61273]: DEBUG oslo_concurrency.lockutils [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] Acquiring lock "refresh_cache-b720f9f1-9401-40b1-978b-9b8eefe712ea" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.104813] env[61273]: DEBUG oslo_concurrency.lockutils [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] Acquired lock "refresh_cache-b720f9f1-9401-40b1-978b-9b8eefe712ea" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.104813] env[61273]: DEBUG nova.network.neutron [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Refreshing network info cache for port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 853.105230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] Expecting reply to msg c86749a6235c4971af161283e4a9e8b3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 853.115510] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c86749a6235c4971af161283e4a9e8b3 [ 853.122772] env[61273]: DEBUG nova.scheduler.client.report [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 853.128360] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg fef5a51424f44e3683f094aa02704348 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 853.137428] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fef5a51424f44e3683f094aa02704348 [ 853.174980] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 853.176717] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 1d587c01c7f948e3b9520258b818d4db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 853.226930] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d587c01c7f948e3b9520258b818d4db [ 853.314026] env[61273]: ERROR nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6, please check neutron logs for more information. [ 853.314026] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 853.314026] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.314026] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 853.314026] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.314026] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 853.314026] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.314026] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 853.314026] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.314026] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 853.314026] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.314026] env[61273]: ERROR nova.compute.manager raise self.value [ 853.314026] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.314026] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 853.314026] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.314026] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 853.314460] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.314460] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 853.314460] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6, please check neutron logs for more information. 
[ 853.314460] env[61273]: ERROR nova.compute.manager [ 853.314460] env[61273]: Traceback (most recent call last): [ 853.314460] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 853.314460] env[61273]: listener.cb(fileno) [ 853.314460] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.314460] env[61273]: result = function(*args, **kwargs) [ 853.314460] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.314460] env[61273]: return func(*args, **kwargs) [ 853.314460] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 853.314460] env[61273]: raise e [ 853.314460] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.314460] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 853.314460] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.314460] env[61273]: created_port_ids = self._update_ports_for_instance( [ 853.314460] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.314460] env[61273]: with excutils.save_and_reraise_exception(): [ 853.314460] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.314460] env[61273]: self.force_reraise() [ 853.314460] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.314460] env[61273]: raise self.value [ 853.314460] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.314460] env[61273]: updated_port = self._update_port( [ 853.314460] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.314460] env[61273]: _ensure_no_port_binding_failure(port) [ 853.314460] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.314460] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 853.315207] env[61273]: nova.exception.PortBindingFailed: Binding failed for port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6, please check neutron logs for more information. [ 853.315207] env[61273]: Removing descriptor: 15 [ 853.349695] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 853.372034] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.372034] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.372034] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.372229] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.372229] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.372229] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.372229] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.372229] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 853.372384] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.372384] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.372553] env[61273]: DEBUG nova.virt.hardware [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.373397] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc7bf91-5e13-46bc-93d9-4b083a77ecdb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.381208] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d30114c-29c9-418f-8c22-431b78ed48ab {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.395645] env[61273]: ERROR nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6, please check neutron logs for more information. 
[ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Traceback (most recent call last): [ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] yield resources [ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self.driver.spawn(context, instance, image_meta, [ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] vm_ref = self.build_virtual_machine(instance, [ 853.395645] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] vif_infos = vmwarevif.get_vif_info(self._session, [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] for vif in network_info: [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] return self._sync_wrapper(fn, *args, **kwargs) [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self.wait() [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self[:] = self._gt.wait() [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] return self._exit_event.wait() [ 853.395991] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 853.395991] env[61273]: ERROR 
nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] current.throw(*self._exc) [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] result = function(*args, **kwargs) [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] return func(*args, **kwargs) [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] raise e [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] nwinfo = self.network_api.allocate_for_instance( [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] created_port_ids = self._update_ports_for_instance( [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] with excutils.save_and_reraise_exception(): [ 853.396330] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self.force_reraise() [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] raise self.value [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] updated_port = self._update_port( [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] _ensure_no_port_binding_failure(port) [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] raise exception.PortBindingFailed(port_id=port['id']) [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] nova.exception.PortBindingFailed: Binding failed for port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6, please check neutron logs for more information. [ 853.396639] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] [ 853.396639] env[61273]: INFO nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Terminating instance [ 853.398229] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "refresh_cache-b720f9f1-9401-40b1-978b-9b8eefe712ea" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.625619] env[61273]: DEBUG nova.network.neutron [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.627946] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.628495] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 853.630332] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg b5858f2c9b4c41c486968fcedf885935 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 853.632032] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.997s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.632189] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.632491] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61273) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 853.632835] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.919s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.634465] env[61273]: INFO nova.compute.claims [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.636151] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 639f1a1c80d342efb38ea03a8a623ab9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 853.638995] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db71b29d-93d4-487e-8758-da3dfaa426ed {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.646682] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "d4a2025d-c128-45a2-b74c-a7fd2630d615" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.647011] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "d4a2025d-c128-45a2-b74c-a7fd2630d615" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.652231] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6053abd4-9993-4d3e-b6f6-fefad55c604f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.669021] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5858f2c9b4c41c486968fcedf885935 [ 853.669713] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 639f1a1c80d342efb38ea03a8a623ab9 [ 853.670654] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1441cdc-c2ea-48e2-a376-cdd10ab85710 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.680547] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9abf902-1b2e-45f0-9199-5535a72a08f2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.719145] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181719MB free_disk=141GB free_vcpus=48 pci_devices=None {{(pid=61273) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 853.719353] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.720575] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.721440] env[61273]: DEBUG nova.network.neutron [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.721999] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] Expecting reply to msg 83e95db5309948b8ac0781523d7bb857 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 853.730735] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83e95db5309948b8ac0781523d7bb857 [ 853.793698] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.793935] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.133682] env[61273]: DEBUG nova.compute.utils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 854.134733] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg ad9205a9f52f47c28f7bf2fc3d388788 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 854.135291] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 854.135460] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 854.138832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg de6d7b0289cb4bb1a5368cfeffb9c755 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 854.146690] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad9205a9f52f47c28f7bf2fc3d388788 [ 854.146816] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de6d7b0289cb4bb1a5368cfeffb9c755 [ 854.176239] env[61273]: DEBUG nova.policy [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3078a2af81b248f8b100f58ee66a5a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c06b50a7aaa742afbbd0c6fc56c3d131', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 854.224909] env[61273]: DEBUG oslo_concurrency.lockutils [req-1674b3ab-bba7-49de-97c1-006dcb4d9353 req-17628e34-efb3-40be-92c0-113b7296da7f service nova] Releasing lock "refresh_cache-b720f9f1-9401-40b1-978b-9b8eefe712ea" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
854.225065] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquired lock "refresh_cache-b720f9f1-9401-40b1-978b-9b8eefe712ea" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.225235] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 854.225678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg aac19559caf544558c5d58c3300d6160 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 854.232672] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aac19559caf544558c5d58c3300d6160 [ 854.429051] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Successfully created port: 94ad734d-164b-4d36-83a3-e69aa8777538 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 854.638067] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 854.639855] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 84ea956fb4894d868a4bec320fb6ad2c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 854.675302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84ea956fb4894d868a4bec320fb6ad2c [ 854.745114] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 854.825355] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.825918] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 6da46234324746fc884bfcdff61de573 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 854.838145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6da46234324746fc884bfcdff61de573 [ 854.876589] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2ede2e-c7f9-4f4a-8f20-f97d615abdd7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.884037] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c190f0-d5f8-4847-a7a2-b248759afe59 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.912900] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e38781-3a8e-4dc6-8139-6045daf5fa49 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.919495] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518c2d99-4971-46ec-b70e-a191cea6c4c3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.933302] env[61273]: DEBUG nova.compute.provider_tree [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.933773] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg f13630699b4a48c0abeea0aebcdbc369 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 854.941044] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f13630699b4a48c0abeea0aebcdbc369 [ 855.147016] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 2a02e19baf694f708fa2010798f70616 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 855.149415] env[61273]: DEBUG nova.compute.manager [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Received event network-vif-deleted-f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 
855.150330] env[61273]: DEBUG nova.compute.manager [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Received event network-changed-94ad734d-164b-4d36-83a3-e69aa8777538 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 855.150330] env[61273]: DEBUG nova.compute.manager [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Refreshing instance network info cache due to event network-changed-94ad734d-164b-4d36-83a3-e69aa8777538. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 855.150330] env[61273]: DEBUG oslo_concurrency.lockutils [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] Acquiring lock "refresh_cache-9952d347-2ca7-48f2-8ee1-dc1d767402dc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.150330] env[61273]: DEBUG oslo_concurrency.lockutils [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] Acquired lock "refresh_cache-9952d347-2ca7-48f2-8ee1-dc1d767402dc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.150330] env[61273]: DEBUG nova.network.neutron [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Refreshing network info cache for port 94ad734d-164b-4d36-83a3-e69aa8777538 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 855.150578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] Expecting reply to msg e1f2495b5c44464eb661cc01bdadb01a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 855.154346] env[61273]: ERROR nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 94ad734d-164b-4d36-83a3-e69aa8777538, please check neutron logs for more information. 
[ 855.154346] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 855.154346] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 855.154346] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 855.154346] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 855.154346] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 855.154346] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 855.154346] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 855.154346] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 855.154346] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 855.154346] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 855.154346] env[61273]: ERROR nova.compute.manager raise self.value [ 855.154346] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 855.154346] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 855.154346] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 855.154346] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 855.154770] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 855.154770] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 855.154770] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 94ad734d-164b-4d36-83a3-e69aa8777538, please check neutron logs for more information. 
[ 855.154770] env[61273]: ERROR nova.compute.manager [ 855.154770] env[61273]: Traceback (most recent call last): [ 855.154770] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 855.154770] env[61273]: listener.cb(fileno) [ 855.154770] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 855.154770] env[61273]: result = function(*args, **kwargs) [ 855.154770] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 855.154770] env[61273]: return func(*args, **kwargs) [ 855.154770] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 855.154770] env[61273]: raise e [ 855.154770] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 855.154770] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 855.154770] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 855.154770] env[61273]: created_port_ids = self._update_ports_for_instance( [ 855.154770] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 855.154770] env[61273]: with excutils.save_and_reraise_exception(): [ 855.154770] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 855.154770] env[61273]: self.force_reraise() [ 855.154770] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 855.154770] env[61273]: raise self.value [ 855.154770] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 855.154770] env[61273]: updated_port = self._update_port( [ 855.154770] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 855.154770] env[61273]: _ensure_no_port_binding_failure(port) [ 855.154770] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 855.154770] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 855.155586] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 94ad734d-164b-4d36-83a3-e69aa8777538, please check neutron logs for more information. [ 855.155586] env[61273]: Removing descriptor: 15 [ 855.160029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1f2495b5c44464eb661cc01bdadb01a [ 855.197053] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a02e19baf694f708fa2010798f70616 [ 855.328302] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Releasing lock "refresh_cache-b720f9f1-9401-40b1-978b-9b8eefe712ea" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.328751] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 855.328943] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 855.329282] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91e98b99-9fdc-4350-9b40-d325c627a673 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.338732] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650df953-da20-412a-98aa-fc7c6d7c839c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.360397] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b720f9f1-9401-40b1-978b-9b8eefe712ea could not be found. [ 855.360625] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 855.360805] env[61273]: INFO nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Took 0.03 seconds to destroy the instance on the hypervisor. [ 855.361047] env[61273]: DEBUG oslo.service.loopingcall [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.361274] env[61273]: DEBUG nova.compute.manager [-] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 855.361366] env[61273]: DEBUG nova.network.neutron [-] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 855.375125] env[61273]: DEBUG nova.network.neutron [-] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 855.375671] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6de122747d144291b3e29ca3a8086714 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 855.383695] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6de122747d144291b3e29ca3a8086714 [ 855.437002] env[61273]: DEBUG nova.scheduler.client.report [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.439802] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg a853d51289da43d7ab0f56e2a0d619b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 855.452989] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a853d51289da43d7ab0f56e2a0d619b2 [ 855.653635] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 855.671345] env[61273]: DEBUG nova.network.neutron [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 855.681160] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 855.681160] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 855.681377] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.681449] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 855.681629] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.681841] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 855.682078] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 855.682409] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 855.682555] env[61273]: DEBUG nova.virt.hardware [None 
req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 855.682795] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 855.683062] env[61273]: DEBUG nova.virt.hardware [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 855.684040] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b4f5d2-1d36-4c1c-a837-a9ae1e6f3f1d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.691841] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72076a8d-8a21-4899-bf24-233e8826794c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.705275] env[61273]: ERROR nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 94ad734d-164b-4d36-83a3-e69aa8777538, please check neutron logs for more information. 
[ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Traceback (most recent call last): [ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] yield resources [ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self.driver.spawn(context, instance, image_meta, [ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] vm_ref = self.build_virtual_machine(instance, [ 855.705275] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] for vif in network_info: [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] return self._sync_wrapper(fn, *args, **kwargs) [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self.wait() [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self[:] = self._gt.wait() [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] return self._exit_event.wait() [ 855.705686] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 855.705686] env[61273]: ERROR 
nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] current.throw(*self._exc) [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] result = function(*args, **kwargs) [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] return func(*args, **kwargs) [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] raise e [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] nwinfo = self.network_api.allocate_for_instance( [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] created_port_ids = self._update_ports_for_instance( [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] with excutils.save_and_reraise_exception(): [ 855.706018] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self.force_reraise() [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] raise self.value [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] updated_port = self._update_port( [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] _ensure_no_port_binding_failure(port) [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] raise exception.PortBindingFailed(port_id=port['id']) [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] nova.exception.PortBindingFailed: Binding failed for port 94ad734d-164b-4d36-83a3-e69aa8777538, please check neutron logs for more information. [ 855.706339] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] [ 855.706339] env[61273]: INFO nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Terminating instance [ 855.707696] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-9952d347-2ca7-48f2-8ee1-dc1d767402dc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.745424] env[61273]: DEBUG nova.network.neutron [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.746042] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] Expecting reply to msg 61656d7c2d814b039c720e0132401b94 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 855.754552] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61656d7c2d814b039c720e0132401b94 [ 855.877456] env[61273]: DEBUG nova.network.neutron [-] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.877921] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f812d994d58b498a82e8a27d0660d140 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 855.887982] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f812d994d58b498a82e8a27d0660d140 [ 855.941975] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.309s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.942452] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 855.944277] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 368a235790514ba18933028a6816436e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 855.945724] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.484s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.947525] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 0a389f92a8ea4b4e8b51825c8abfb96e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 855.972821] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 368a235790514ba18933028a6816436e [ 855.978771] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a389f92a8ea4b4e8b51825c8abfb96e [ 856.249141] env[61273]: DEBUG oslo_concurrency.lockutils [req-5c46bfcb-efa5-4153-807d-50cf985558ec req-e9909f21-da44-4660-88ba-032ec18761c4 service nova] Releasing lock "refresh_cache-9952d347-2ca7-48f2-8ee1-dc1d767402dc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.249647] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-9952d347-2ca7-48f2-8ee1-dc1d767402dc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.249845] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 856.250281] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg c022525d9e1b43ab801e0bc154a69b9f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 856.257917] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c022525d9e1b43ab801e0bc154a69b9f [ 856.379968] env[61273]: INFO nova.compute.manager [-] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Took 1.02 seconds to deallocate network for instance. 
[ 856.382476] env[61273]: DEBUG nova.compute.claims [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 856.382731] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.447696] env[61273]: DEBUG nova.compute.utils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 856.448380] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg ec93204bf506449fa6d210de2c8aa1d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 856.449345] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 856.449514] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 856.463845] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec93204bf506449fa6d210de2c8aa1d2 [ 856.504292] env[61273]: DEBUG nova.policy [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f09e88112924400db81a4fbe611482f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70c9e6d7debd4d8e8cb7790975294a22', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 856.687318] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5a78bd-1f3a-4871-b6a2-18fb25cdb722 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.695180] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7745565f-ba07-45b0-a47c-5bf10edeccc9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
856.725379] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9e4053-6c52-4ce7-8ba9-88998904ef99 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.735304] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6143c75-3305-4c38-9232-8e2e47e96726 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.748375] env[61273]: DEBUG nova.compute.provider_tree [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.748885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 2bd0f54358ec43a0a3341db7d05ef1cf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 856.757494] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bd0f54358ec43a0a3341db7d05ef1cf [ 856.763629] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Successfully created port: 700bf580-c47d-4fd9-8067-c5c7564d83b0 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.767207] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 856.837973] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.838486] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 75ffb4a802f447b7a0842770357f6052 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 856.846334] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75ffb4a802f447b7a0842770357f6052 [ 856.955898] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 856.958021] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 74bdb520dec54816a3a04aa9db030baa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 856.994420] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74bdb520dec54816a3a04aa9db030baa [ 857.172525] env[61273]: DEBUG nova.compute.manager [req-5883ed25-70df-4005-916c-fad240e4943e req-3a13aa63-6621-48d1-889a-cc9ee2a92fbf service nova] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Received event network-vif-deleted-94ad734d-164b-4d36-83a3-e69aa8777538 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 857.251846] env[61273]: DEBUG nova.scheduler.client.report [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 857.254403] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 51a978810e2748b1a8abcbb1e2ea240f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 857.273634] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51a978810e2748b1a8abcbb1e2ea240f [ 857.341115] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-9952d347-2ca7-48f2-8ee1-dc1d767402dc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.341778] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 857.342087] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 857.342804] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-687fe7dd-12db-4e0c-bf60-da17d62e3a10 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.353496] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b2dde1-7c4e-467f-89b0-e17c4bcc702a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.375495] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9952d347-2ca7-48f2-8ee1-dc1d767402dc could not be found. [ 857.375864] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 857.376230] env[61273]: INFO nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Took 0.03 seconds to destroy the instance on the hypervisor. [ 857.376588] env[61273]: DEBUG oslo.service.loopingcall [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.376923] env[61273]: DEBUG nova.compute.manager [-] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 857.377106] env[61273]: DEBUG nova.network.neutron [-] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 857.392695] env[61273]: DEBUG nova.network.neutron [-] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.393357] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 632409082d6a4127a8410850fe069e44 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 857.400460] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 632409082d6a4127a8410850fe069e44 [ 857.463294] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg eadd25ee568848e2a2578aa787677100 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 857.492687] env[61273]: ERROR nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 700bf580-c47d-4fd9-8067-c5c7564d83b0, please check neutron logs for more information. [ 857.492687] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 857.492687] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.492687] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 857.492687] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 857.492687] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 857.492687] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 857.492687] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 857.492687] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.492687] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 857.492687] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.492687] env[61273]: ERROR nova.compute.manager raise self.value [ 857.492687] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 857.492687] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 857.492687] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.492687] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 857.493180] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.493180] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 857.493180] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 700bf580-c47d-4fd9-8067-c5c7564d83b0, please check neutron logs for more information. 
[ 857.493180] env[61273]: ERROR nova.compute.manager [ 857.493514] env[61273]: Traceback (most recent call last): [ 857.493605] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 857.493605] env[61273]: listener.cb(fileno) [ 857.493721] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 857.493721] env[61273]: result = function(*args, **kwargs) [ 857.493795] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 857.493795] env[61273]: return func(*args, **kwargs) [ 857.493869] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 857.493869] env[61273]: raise e [ 857.493934] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.493934] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 857.494020] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 857.494020] env[61273]: created_port_ids = self._update_ports_for_instance( [ 857.494100] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 857.494100] env[61273]: with excutils.save_and_reraise_exception(): [ 857.494164] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.494164] env[61273]: self.force_reraise() [ 857.494644] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.494644] env[61273]: raise self.value [ 857.494775] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 857.494775] env[61273]: updated_port = self._update_port( [ 857.494846] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.494846] env[61273]: _ensure_no_port_binding_failure(port) [ 857.494918] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.494918] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 857.495016] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 700bf580-c47d-4fd9-8067-c5c7564d83b0, please check neutron logs for more information. [ 857.495181] env[61273]: Removing descriptor: 15 [ 857.503462] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eadd25ee568848e2a2578aa787677100 [ 857.764127] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.811s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.764127] env[61273]: ERROR nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0f8ef003-3fad-4161-b909-098c989850aa, please check neutron logs for more information. 
[ 857.764127] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Traceback (most recent call last): [ 857.764127] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 857.764127] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self.driver.spawn(context, instance, image_meta, [ 857.764127] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 857.764127] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self._vmops.spawn(context, instance, image_meta, injected_files, [ 857.764127] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 857.764127] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] vm_ref = self.build_virtual_machine(instance, [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] vif_infos = vmwarevif.get_vif_info(self._session, [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] for vif in network_info: [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] return self._sync_wrapper(fn, *args, **kwargs) [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self.wait() [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self[:] = self._gt.wait() [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] return self._exit_event.wait() [ 857.764459] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] result = hub.switch() [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] return self.greenlet.switch() [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] result = function(*args, **kwargs) [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] return func(*args, **kwargs) [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] raise e [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] nwinfo = self.network_api.allocate_for_instance( [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 857.764785] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] created_port_ids = self._update_ports_for_instance( [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] with excutils.save_and_reraise_exception(): [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] self.force_reraise() [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] raise self.value [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] updated_port = self._update_port( [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] _ensure_no_port_binding_failure(port) [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 857.765259] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] raise exception.PortBindingFailed(port_id=port['id']) [ 857.765557] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] nova.exception.PortBindingFailed: Binding failed for port 0f8ef003-3fad-4161-b909-098c989850aa, please check neutron logs for more information. [ 857.765557] env[61273]: ERROR nova.compute.manager [instance: 9debd209-244f-472a-b9d6-cf63bba98839] [ 857.765557] env[61273]: DEBUG nova.compute.utils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Binding failed for port 0f8ef003-3fad-4161-b909-098c989850aa, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 857.765557] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.246s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.765557] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 1015a1bfac4643268be9120740b7a15a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 857.765692] env[61273]: DEBUG nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Build of instance 9debd209-244f-472a-b9d6-cf63bba98839 was re-scheduled: Binding failed for port 0f8ef003-3fad-4161-b909-098c989850aa, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 857.765692] env[61273]: DEBUG nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 857.765692] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "refresh_cache-9debd209-244f-472a-b9d6-cf63bba98839" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.765692] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquired lock "refresh_cache-9debd209-244f-472a-b9d6-cf63bba98839" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.765835] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.766206] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg b18c002863144ae2ad2b52bff91cc3e4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 857.773241] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b18c002863144ae2ad2b52bff91cc3e4 [ 857.792499] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1015a1bfac4643268be9120740b7a15a [ 857.896030] env[61273]: DEBUG nova.network.neutron [-] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.896439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7dfbed7ee19040048b90ceb9ce7e232c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 857.904362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dfbed7ee19040048b90ceb9ce7e232c [ 857.966384] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 857.995508] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 857.995833] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 857.996017] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.996222] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 857.996373] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.996636] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 857.996918] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 857.997088] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 857.997258] env[61273]: DEBUG nova.virt.hardware [None 
req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 857.997422] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 857.997595] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 857.998530] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217a64f9-3c30-48d9-a868-1272b6840d4f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.006768] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45ac7f7-24e7-440f-8336-6ce66a903b3d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.021537] env[61273]: ERROR nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 700bf580-c47d-4fd9-8067-c5c7564d83b0, please check neutron logs for more information. 
[ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Traceback (most recent call last): [ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] yield resources [ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] self.driver.spawn(context, instance, image_meta, [ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] self._vmops.spawn(context, instance, image_meta, injected_files, [ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] vm_ref = self.build_virtual_machine(instance, [ 858.021537] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] vif_infos = vmwarevif.get_vif_info(self._session, [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] for vif in network_info: [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] return self._sync_wrapper(fn, *args, **kwargs) [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] self.wait() [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] self[:] = self._gt.wait() [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] return self._exit_event.wait() [ 858.021966] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 858.021966] env[61273]: ERROR 
nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] current.throw(*self._exc) [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] result = function(*args, **kwargs) [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] return func(*args, **kwargs) [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] raise e [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] nwinfo = self.network_api.allocate_for_instance( [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] created_port_ids = self._update_ports_for_instance( [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] with excutils.save_and_reraise_exception(): [ 858.022310] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] self.force_reraise() [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] raise self.value [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] updated_port = self._update_port( [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] _ensure_no_port_binding_failure(port) [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] raise exception.PortBindingFailed(port_id=port['id']) [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] nova.exception.PortBindingFailed: Binding failed for port 700bf580-c47d-4fd9-8067-c5c7564d83b0, please check neutron logs for more information. [ 858.022664] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] [ 858.022664] env[61273]: INFO nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Terminating instance [ 858.023218] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "refresh_cache-1fde207b-9d32-4cff-b3fe-d0caddd20f69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.023471] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquired lock "refresh_cache-1fde207b-9d32-4cff-b3fe-d0caddd20f69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.023824] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 858.024361] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 6b4112566a1b46c7854a9b08869b9e01 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 858.031549] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b4112566a1b46c7854a9b08869b9e01 [ 858.288756] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.391428] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.392159] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 977c489207ae408e90460caa27d3ecf8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 858.400121] env[61273]: INFO nova.compute.manager [-] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Took 1.02 seconds to deallocate network for instance. [ 858.400943] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 977c489207ae408e90460caa27d3ecf8 [ 858.403021] env[61273]: DEBUG nova.compute.claims [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 858.403309] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.482097] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f2b38f-d0a4-4794-ad80-eea77b8e20a5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.491332] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ff8f66-d5be-4e7e-a040-ffea99bc88cb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.520905] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e58f4a6-de55-456a-9cd4-108d2f6772bb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.530020] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9002ef13-48b8-49bf-8202-10336735ce0a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.544848] env[61273]: DEBUG nova.compute.provider_tree [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.545479] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 
bf912218e6a748c68b7dab4f78a691ab in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 858.547342] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.553650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf912218e6a748c68b7dab4f78a691ab [ 858.610111] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.610774] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg f264ca3ff8aa4e1196b0b70ea550635a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 858.619266] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f264ca3ff8aa4e1196b0b70ea550635a [ 858.894415] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Releasing lock "refresh_cache-9debd209-244f-472a-b9d6-cf63bba98839" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.894965] env[61273]: DEBUG nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 858.895312] env[61273]: DEBUG nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 858.895604] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 858.911945] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.912634] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 747d6365f5664d958671b8957db12d50 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 858.920233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 747d6365f5664d958671b8957db12d50 [ 859.054097] env[61273]: DEBUG nova.scheduler.client.report [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 859.054097] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 4efe002681594c2988b17692428c6318 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 859.067465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4efe002681594c2988b17692428c6318 [ 859.113423] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Releasing lock "refresh_cache-1fde207b-9d32-4cff-b3fe-d0caddd20f69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.114025] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 859.114356] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 859.114767] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96e2d495-01fe-4abe-9128-8f2ee63474d3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.124203] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45eec8f3-7f41-46d6-b1f1-7e25f7bc9b80 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.146921] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1fde207b-9d32-4cff-b3fe-d0caddd20f69 could not be found. [ 859.147302] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 859.147588] env[61273]: INFO nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Took 0.03 seconds to destroy the instance on the hypervisor. [ 859.147958] env[61273]: DEBUG oslo.service.loopingcall [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.148318] env[61273]: DEBUG nova.compute.manager [-] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 859.148529] env[61273]: DEBUG nova.network.neutron [-] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 859.168482] env[61273]: DEBUG nova.network.neutron [-] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 859.169094] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8f4479f4bb034f2788c5114962686f8d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 859.176329] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f4479f4bb034f2788c5114962686f8d [ 859.197919] env[61273]: DEBUG nova.compute.manager [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Received event network-changed-700bf580-c47d-4fd9-8067-c5c7564d83b0 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 859.198241] env[61273]: DEBUG nova.compute.manager [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Refreshing instance network info cache due to event network-changed-700bf580-c47d-4fd9-8067-c5c7564d83b0. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 859.198567] env[61273]: DEBUG oslo_concurrency.lockutils [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] Acquiring lock "refresh_cache-1fde207b-9d32-4cff-b3fe-d0caddd20f69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.198833] env[61273]: DEBUG oslo_concurrency.lockutils [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] Acquired lock "refresh_cache-1fde207b-9d32-4cff-b3fe-d0caddd20f69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.199124] env[61273]: DEBUG nova.network.neutron [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Refreshing network info cache for port 700bf580-c47d-4fd9-8067-c5c7564d83b0 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 859.199654] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] Expecting reply to msg 9c4f0e56623d4fa2af3fec1323332b27 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 859.206145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c4f0e56623d4fa2af3fec1323332b27 [ 859.415248] env[61273]: DEBUG nova.network.neutron [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.416260] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg d61a3e1bca644585b5b933edbc8fc979 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 859.424488] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d61a3e1bca644585b5b933edbc8fc979 [ 859.556408] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 
tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.797s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.557266] env[61273]: ERROR nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c, please check neutron logs for more information. [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Traceback (most recent call last): [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self.driver.spawn(context, instance, image_meta, [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self._vmops.spawn(context, instance, image_meta, injected_files, [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] vm_ref = self.build_virtual_machine(instance, [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] vif_infos = vmwarevif.get_vif_info(self._session, [ 859.557266] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] for vif in network_info: [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] return self._sync_wrapper(fn, *args, **kwargs) [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self.wait() [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self[:] = self._gt.wait() [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] return self._exit_event.wait() [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] current.throw(*self._exc) [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 859.557663] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] result = function(*args, **kwargs) [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] return func(*args, **kwargs) [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] raise e [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] nwinfo = self.network_api.allocate_for_instance( [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] created_port_ids = self._update_ports_for_instance( [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] with excutils.save_and_reraise_exception(): [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] self.force_reraise() [ 859.558044] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 859.558417] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] raise self.value [ 859.558417] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 859.558417] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] updated_port = self._update_port( [ 859.558417] env[61273]: ERROR nova.compute.manager [instance: 
faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 859.558417] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] _ensure_no_port_binding_failure(port) [ 859.558417] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 859.558417] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] raise exception.PortBindingFailed(port_id=port['id']) [ 859.558417] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] nova.exception.PortBindingFailed: Binding failed for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c, please check neutron logs for more information. [ 859.558417] env[61273]: ERROR nova.compute.manager [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] [ 859.559457] env[61273]: DEBUG nova.compute.utils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Binding failed for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 859.560759] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.763s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.562785] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 474c56dcdb5645f28fbd1c97a99f19d9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 859.564048] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Build of instance faaabf1e-74af-4cfa-ba1c-e2c2fabad124 was re-scheduled: Binding failed for port ec4e0a91-b568-46e5-b1f8-d48f15b87d5c, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 859.564639] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 859.564969] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "refresh_cache-faaabf1e-74af-4cfa-ba1c-e2c2fabad124" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.565230] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquired lock "refresh_cache-faaabf1e-74af-4cfa-ba1c-e2c2fabad124" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.565498] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 859.565955] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 77e8f91cb39b427294242fbca046de68 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 859.572170] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77e8f91cb39b427294242fbca046de68 [ 859.596570] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 474c56dcdb5645f28fbd1c97a99f19d9 [ 859.671125] env[61273]: DEBUG nova.network.neutron [-] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.671753] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 680d4573bab64ee396c2988c99beb909 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 859.680491] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 680d4573bab64ee396c2988c99beb909 [ 859.719112] env[61273]: DEBUG nova.network.neutron [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 859.795988] env[61273]: DEBUG nova.network.neutron [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.796178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] Expecting reply to msg 640e1852b9724aa9a9dca991b0a624b8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 859.806008] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 640e1852b9724aa9a9dca991b0a624b8 [ 859.918985] env[61273]: INFO nova.compute.manager [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 9debd209-244f-472a-b9d6-cf63bba98839] Took 1.02 seconds to deallocate network for instance. [ 859.920754] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 677448d2e14844abacbe054977f75fcd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 859.952774] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 677448d2e14844abacbe054977f75fcd [ 860.088103] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 860.155078] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.155575] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 596d4b39c52e4ddd92040002a0285b60 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 860.163534] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 596d4b39c52e4ddd92040002a0285b60 [ 860.174077] env[61273]: INFO nova.compute.manager [-] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Took 1.03 seconds to deallocate network for instance. 
[ 860.176183] env[61273]: DEBUG nova.compute.claims [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 860.176366] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.259214] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c562c7-26fb-4a64-a21f-735b911ca641 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.267313] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50182629-54ce-45bf-8ab3-aef7cf7daf33 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.298809] env[61273]: DEBUG oslo_concurrency.lockutils [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] Releasing lock "refresh_cache-1fde207b-9d32-4cff-b3fe-d0caddd20f69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.299070] env[61273]: DEBUG nova.compute.manager [req-f035cb02-2f64-46ef-b876-ed22f7f9a8ac req-3d622f64-add2-46f0-8f35-cd205a100123 service nova] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Received event network-vif-deleted-700bf580-c47d-4fd9-8067-c5c7564d83b0 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 860.300075] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c38a4ed-624c-4a44-b9c1-f9e2fbe8efd3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.307223] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e580a6-eaf3-487b-a284-a80e3c600f6b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.319905] env[61273]: DEBUG nova.compute.provider_tree [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.320429] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 82f6f7cb5d67419c87dd9c249ac89937 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 860.327632] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82f6f7cb5d67419c87dd9c249ac89937 [ 860.426229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 
tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 20cbbae59d3a4e9c8e5228e52ab0a440 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 860.458085] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20cbbae59d3a4e9c8e5228e52ab0a440 [ 860.660316] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Releasing lock "refresh_cache-faaabf1e-74af-4cfa-ba1c-e2c2fabad124" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.660316] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 860.660316] env[61273]: DEBUG nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 860.660316] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 860.680621] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 860.681237] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 5a3c421d46f64d2a9f5eb62619ecb635 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 860.690361] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a3c421d46f64d2a9f5eb62619ecb635 [ 860.822695] env[61273]: DEBUG nova.scheduler.client.report [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 860.825310] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 9ef53277d21e4b10905aac5e20dda30a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 860.839347] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ef53277d21e4b10905aac5e20dda30a [ 860.959711] env[61273]: INFO nova.scheduler.client.report [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Deleted allocations for instance 9debd209-244f-472a-b9d6-cf63bba98839 [ 860.967671] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 8f041563250d40bc9f13513ccf22ac0f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 860.985113] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f041563250d40bc9f13513ccf22ac0f [ 861.183477] env[61273]: DEBUG nova.network.neutron [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.183962] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 18cffc94572f44529d9d198b324ed25a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 861.192788] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18cffc94572f44529d9d198b324ed25a [ 861.327643] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.767s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.328314] env[61273]: ERROR nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2, please check neutron logs for more information. [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Traceback (most recent call last): [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self.driver.spawn(context, instance, image_meta, [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self._vmops.spawn(context, instance, image_meta, injected_files, [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] vm_ref = self.build_virtual_machine(instance, [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] vif_infos = vmwarevif.get_vif_info(self._session, [ 861.328314] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] for vif in network_info: [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] return self._sync_wrapper(fn, *args, **kwargs) [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self.wait() [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self[:] = self._gt.wait() [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 
861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] return self._exit_event.wait() [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] current.throw(*self._exc) [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 861.328703] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] result = function(*args, **kwargs) [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] return func(*args, **kwargs) [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] raise e [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] nwinfo = self.network_api.allocate_for_instance( [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] created_port_ids = self._update_ports_for_instance( [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] with excutils.save_and_reraise_exception(): [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] self.force_reraise() [ 861.329139] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 861.329521] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] raise self.value [ 861.329521] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 861.329521] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] updated_port = self._update_port( [ 861.329521] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 585, in 
_update_port [ 861.329521] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] _ensure_no_port_binding_failure(port) [ 861.329521] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 861.329521] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] raise exception.PortBindingFailed(port_id=port['id']) [ 861.329521] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] nova.exception.PortBindingFailed: Binding failed for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2, please check neutron logs for more information. [ 861.329521] env[61273]: ERROR nova.compute.manager [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] [ 861.329521] env[61273]: DEBUG nova.compute.utils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Binding failed for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 861.330325] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.108s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.332230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg a6151cceaced44fca4064a5ec9b4e573 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 861.333925] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Build of instance 98f63a99-f1b8-4420-978d-7b69c39a2692 was re-scheduled: Binding failed for port 5eb78d19-b6bf-451e-880f-0ae3168b6fd2, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 861.334366] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 861.334590] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquiring lock "refresh_cache-98f63a99-f1b8-4420-978d-7b69c39a2692" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.334737] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Acquired lock "refresh_cache-98f63a99-f1b8-4420-978d-7b69c39a2692" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.334900] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 861.335270] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 6230ef55064d487d9a1f249d17433d8c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 861.343495] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6230ef55064d487d9a1f249d17433d8c [ 861.364040] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6151cceaced44fca4064a5ec9b4e573 [ 861.469692] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c4188ca8-21f0-4668-8f75-2059b2925a67 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "9debd209-244f-472a-b9d6-cf63bba98839" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.088s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.470449] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 1eba77b13b714015b677e4b64a639dd6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 861.481456] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1eba77b13b714015b677e4b64a639dd6 [ 861.686896] env[61273]: INFO nova.compute.manager [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: faaabf1e-74af-4cfa-ba1c-e2c2fabad124] Took 1.03 seconds to deallocate network for instance. 
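[editor's note] The PortBindingFailed traceback above bottoms out in nova/network/neutron.py:294, _ensure_no_port_binding_failure(), which raises the exception when Neutron hands back a port whose binding did not succeed. A minimal sketch of that check follows; it is illustrative only, not the actual Nova source, and it assumes Neutron signals a failed binding through the port's binding:vif_type attribute (that attribute name does not appear in the log).

    # Hedged sketch: conceptual equivalent of the check at
    # nova/network/neutron.py:294 seen in the traceback above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Assumption: a failed binding is marked by
        # port['binding:vif_type'] == 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

Once this exception propagates back to _do_build_and_run_instance(), the claim on "compute_resources" is aborted and the build is re-scheduled, which is exactly the abort_instance_claim lock release and "was re-scheduled: Binding failed for port ..." entries that follow each traceback in this log.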
[ 861.688678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 8f4af6f0b84d4a7e9b31744d803f180f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 861.723123] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f4af6f0b84d4a7e9b31744d803f180f [ 861.857354] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 861.939846] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.940414] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 309fdae863484f0094704abdff47bd7f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 861.948716] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 309fdae863484f0094704abdff47bd7f [ 861.972507] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 861.974254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg d0145cd6828a49f3a327df190f64657a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 862.009582] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0145cd6828a49f3a327df190f64657a [ 862.053556] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505feda9-011c-4b7f-a9b9-e8a9abf05fba {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.061276] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5221c505-e7ba-4c27-b88f-32421879aed2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.090427] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae66f78-5328-4272-8209-02d60577ea0c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.097494] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fb4e7f-dc42-4b14-b36c-6ae6f1b699aa {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.109822] env[61273]: DEBUG nova.compute.provider_tree [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.110314] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg a6ac1927370a442ab43ddb99bf520ddb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 862.117734] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6ac1927370a442ab43ddb99bf520ddb [ 862.193439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 2b7196a4709f4faab9f0d32dcf352b49 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 862.226216] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b7196a4709f4faab9f0d32dcf352b49 [ 862.442872] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Releasing lock "refresh_cache-98f63a99-f1b8-4420-978d-7b69c39a2692" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.443129] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Virt driver does not provide unplug_vifs 
method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 862.443304] env[61273]: DEBUG nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.443475] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 862.459443] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 862.460070] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 61c994097b454279afb440996b4c000f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 862.467003] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61c994097b454279afb440996b4c000f [ 862.490101] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.612816] env[61273]: DEBUG nova.scheduler.client.report [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 862.615232] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg c96f98eefdab4e568be5953932a55f2d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 862.626451] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c96f98eefdab4e568be5953932a55f2d [ 862.712495] env[61273]: INFO nova.scheduler.client.report [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] 
Deleted allocations for instance faaabf1e-74af-4cfa-ba1c-e2c2fabad124 [ 862.718520] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg cf3e5b0128c04fffa9807c7a14f04b17 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 862.735162] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf3e5b0128c04fffa9807c7a14f04b17 [ 862.963031] env[61273]: DEBUG nova.network.neutron [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.963605] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 89305a9493e249a0b0d5c4fbe595fd75 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 862.972539] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89305a9493e249a0b0d5c4fbe595fd75 [ 863.117369] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.787s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.118013] env[61273]: ERROR nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d806ad66-e122-4622-8f56-bfe27013f645, please check neutron logs for more information. 
[ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Traceback (most recent call last): [ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self.driver.spawn(context, instance, image_meta, [ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self._vmops.spawn(context, instance, image_meta, injected_files, [ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] vm_ref = self.build_virtual_machine(instance, [ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] vif_infos = vmwarevif.get_vif_info(self._session, [ 863.118013] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] for vif in network_info: [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] return self._sync_wrapper(fn, *args, **kwargs) [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self.wait() [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self[:] = self._gt.wait() [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] return self._exit_event.wait() [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] current.throw(*self._exc) [ 863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
863.118550] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] result = function(*args, **kwargs) [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] return func(*args, **kwargs) [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] raise e [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] nwinfo = self.network_api.allocate_for_instance( [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] created_port_ids = self._update_ports_for_instance( [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] with excutils.save_and_reraise_exception(): [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] self.force_reraise() [ 863.119219] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 863.119840] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] raise self.value [ 863.119840] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 863.119840] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] updated_port = self._update_port( [ 863.119840] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 863.119840] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] _ensure_no_port_binding_failure(port) [ 863.119840] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 863.119840] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] raise exception.PortBindingFailed(port_id=port['id']) [ 863.119840] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] nova.exception.PortBindingFailed: Binding failed for 
port d806ad66-e122-4622-8f56-bfe27013f645, please check neutron logs for more information. [ 863.119840] env[61273]: ERROR nova.compute.manager [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] [ 863.119840] env[61273]: DEBUG nova.compute.utils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Binding failed for port d806ad66-e122-4622-8f56-bfe27013f645, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 863.120301] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.568s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.121718] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 736f07e9eb294dad93a2efff1af03dfd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 863.123571] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Build of instance 05df2575-9c3f-43d4-8fe4-52a808e11080 was re-scheduled: Binding failed for port d806ad66-e122-4622-8f56-bfe27013f645, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 863.123999] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 863.124310] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "refresh_cache-05df2575-9c3f-43d4-8fe4-52a808e11080" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.124472] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquired lock "refresh_cache-05df2575-9c3f-43d4-8fe4-52a808e11080" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.124634] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 863.125000] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 5897ea4430404ffc94e62b285ec2c6aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 863.131356] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5897ea4430404ffc94e62b285ec2c6aa [ 863.151810] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 736f07e9eb294dad93a2efff1af03dfd [ 863.227504] env[61273]: DEBUG oslo_concurrency.lockutils [None req-c9eb77d9-4674-4b01-8c5c-0f447c5838bb tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "faaabf1e-74af-4cfa-ba1c-e2c2fabad124" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.453s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.228125] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 96cc496524b64c1e9b52240470b03084 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 863.239292] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96cc496524b64c1e9b52240470b03084 [ 863.465959] env[61273]: INFO nova.compute.manager [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] [instance: 98f63a99-f1b8-4420-978d-7b69c39a2692] Took 1.02 seconds to deallocate network for instance. 
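[editor's note] The recurring "Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb" entries all report the same inventory payload. Assuming the usual Placement usable-capacity rule of (total - reserved) * allocation_ratio (an assumption; the formula itself is not stated in the log), the logged figures work out as in this small sketch:

    # Hedged sketch: derive usable capacity from the inventory dict logged by
    # nova.scheduler.client.report for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Assumed formula: capacity = (total - reserved) * allocation_ratio
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)  # VCPU 192, MEMORY_MB 196078, DISK_GB 400

Since the inventory never changes between reports, the scheduler keeps treating the provider tree as up to date, which is why these entries repeat verbatim throughout the failed-build/re-schedule cycles.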
[ 863.467950] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg f4fe83bb955245d0abf6b4a36d93a79d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 863.507718] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4fe83bb955245d0abf6b4a36d93a79d [ 863.650182] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 863.731661] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 863.733782] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg a1139d0a2e40474d91d3df6e2b7c8301 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 863.780016] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1139d0a2e40474d91d3df6e2b7c8301 [ 863.784136] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.784707] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg d0b18a0ab7ed4bc2833857bf44d004ca in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 863.792519] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0b18a0ab7ed4bc2833857bf44d004ca [ 863.873948] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d7853e-d11b-47d6-b9c7-6c9d797b69dc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.881402] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68ecce9-7c78-4855-baa2-4687bb41c2e4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.910989] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67df056-78bb-4e45-ab3a-12265f8c1310 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.917641] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a307a0-1c33-441f-9ff7-230bb12a7345 {{(pid=61273) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.931097] env[61273]: DEBUG nova.compute.provider_tree [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.931595] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 195a75589e7141b8ad3052dc6438758d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 863.938688] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 195a75589e7141b8ad3052dc6438758d [ 863.974109] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 43d3f126a45a470d80c1eeee6a1113b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 864.004491] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43d3f126a45a470d80c1eeee6a1113b2 [ 864.251014] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.287678] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Releasing lock "refresh_cache-05df2575-9c3f-43d4-8fe4-52a808e11080" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.287930] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 864.288131] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 864.288301] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.303313] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.303881] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 64714c6638a84deca94573ba0dc37457 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 864.311506] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64714c6638a84deca94573ba0dc37457 [ 864.434229] env[61273]: DEBUG nova.scheduler.client.report [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.436687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 544dc2ce8f1a43e3b5c4dd08feebe3e4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 864.448666] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 544dc2ce8f1a43e3b5c4dd08feebe3e4 [ 864.495376] env[61273]: INFO nova.scheduler.client.report [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Deleted allocations for instance 98f63a99-f1b8-4420-978d-7b69c39a2692 [ 864.501617] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Expecting reply to msg 7cee8bed934d4db6bf1dfacd7e002938 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 864.518659] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cee8bed934d4db6bf1dfacd7e002938 [ 864.805734] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.806291] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 4dd840644de942119e5b4abf1bc4e49a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 864.814587] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dd840644de942119e5b4abf1bc4e49a [ 864.939764] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.820s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.940420] env[61273]: ERROR nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fdce2ef1-b287-49fc-9741-089b76b0ab0f, please check neutron logs for more information. 
[ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] Traceback (most recent call last): [ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self.driver.spawn(context, instance, image_meta, [ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self._vmops.spawn(context, instance, image_meta, injected_files, [ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] vm_ref = self.build_virtual_machine(instance, [ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] vif_infos = vmwarevif.get_vif_info(self._session, [ 864.940420] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] for vif in network_info: [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] return self._sync_wrapper(fn, *args, **kwargs) [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self.wait() [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self[:] = self._gt.wait() [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] return self._exit_event.wait() [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] current.throw(*self._exc) [ 864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
864.940784] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] result = function(*args, **kwargs) [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] return func(*args, **kwargs) [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] raise e [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] nwinfo = self.network_api.allocate_for_instance( [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] created_port_ids = self._update_ports_for_instance( [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] with excutils.save_and_reraise_exception(): [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] self.force_reraise() [ 864.941258] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 864.941685] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] raise self.value [ 864.941685] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 864.941685] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] updated_port = self._update_port( [ 864.941685] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 864.941685] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] _ensure_no_port_binding_failure(port) [ 864.941685] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 864.941685] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] raise exception.PortBindingFailed(port_id=port['id']) [ 864.941685] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] nova.exception.PortBindingFailed: Binding failed for 
port fdce2ef1-b287-49fc-9741-089b76b0ab0f, please check neutron logs for more information. [ 864.941685] env[61273]: ERROR nova.compute.manager [instance: ae9866e2-544a-4d26-b198-87110f42f054] [ 864.941685] env[61273]: DEBUG nova.compute.utils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Binding failed for port fdce2ef1-b287-49fc-9741-089b76b0ab0f, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 864.942404] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.046s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.943941] env[61273]: INFO nova.compute.claims [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.945522] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 28856a0bf18e47f2bc7950860bd2b5cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 864.947147] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Build of instance ae9866e2-544a-4d26-b198-87110f42f054 was re-scheduled: Binding failed for port fdce2ef1-b287-49fc-9741-089b76b0ab0f, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 864.947730] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 864.947959] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "refresh_cache-ae9866e2-544a-4d26-b198-87110f42f054" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.948128] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquired lock "refresh_cache-ae9866e2-544a-4d26-b198-87110f42f054" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.948287] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 864.948653] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg a8a0e4ec6fed45e9b698cd07b3163e79 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 864.960403] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8a0e4ec6fed45e9b698cd07b3163e79 [ 864.987030] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28856a0bf18e47f2bc7950860bd2b5cc [ 865.010484] env[61273]: DEBUG oslo_concurrency.lockutils [None req-deaffb0e-96a1-4b52-8e20-4c6e8d077769 tempest-ListServerFiltersTestJSON-85515236 tempest-ListServerFiltersTestJSON-85515236-project-member] Lock "98f63a99-f1b8-4420-978d-7b69c39a2692" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.909s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.011055] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 7f912ae3b368486aa05168a65cc10dbb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 865.028166] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f912ae3b368486aa05168a65cc10dbb [ 865.309339] env[61273]: INFO nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: 05df2575-9c3f-43d4-8fe4-52a808e11080] Took 1.02 seconds to deallocate network for instance. 
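[editor's note] The Acquiring / "acquired ... waited 13.046s" / "released ... held 1.820s" triplets around the "compute_resources" lock come from the oslo.concurrency wrapper (lockutils.py:402/407/421) around the resource tracker's instance_claim and abort_instance_claim paths. A minimal sketch of that pattern, with a made-up function name purely for illustration:

    # Hedged sketch of the oslo.concurrency pattern behind the
    # compute_resources lock messages in this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources_for_instance(instance):
        # Runs with the "compute_resources" lock held; the wrapper emits the
        # "acquired ... waited" and "released ... held" debug lines seen above.
        return instance

The long wait times (tens of seconds) reported here reflect claims and aborts from many concurrent tempest builds serializing on that single lock.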
[ 865.311077] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 1710f099be9f4bb284aabf8d893d114f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 865.355065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1710f099be9f4bb284aabf8d893d114f [ 865.452324] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg f8eb4ed68f484b87ac696f1764ef7d24 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 865.461109] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8eb4ed68f484b87ac696f1764ef7d24 [ 865.474763] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 865.513432] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 865.515311] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 65d0763b656144879ffd36592b2af73c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 865.547778] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.548304] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 0c35ef05b7e2405f991b999a656b9ae4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 865.550715] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65d0763b656144879ffd36592b2af73c [ 865.556193] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c35ef05b7e2405f991b999a656b9ae4 [ 865.815512] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 379f56401cab42ccbb4fcae8fda0a965 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 865.876935] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 379f56401cab42ccbb4fcae8fda0a965 [ 866.045583] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 
tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.050565] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Releasing lock "refresh_cache-ae9866e2-544a-4d26-b198-87110f42f054" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.050819] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 866.050966] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 866.051138] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 866.075386] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 866.075957] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg d14e5f3aa8b34088af4938b11f4de5e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 866.087080] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d14e5f3aa8b34088af4938b11f4de5e3 [ 866.176979] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ce827d-423d-4ae0-afcd-2be0664526f2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.185328] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7164b1b-697c-4ad8-b989-023bb7be681d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.215904] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536edea3-f5a9-4e3c-9186-a0ec5b4b9323 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.223314] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d840dec9-9b78-4a2f-85f7-1521d1e82527 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.236339] env[61273]: DEBUG nova.compute.provider_tree [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.236830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 8a39b2e8831448a394ea754360ae840a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 866.243801] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a39b2e8831448a394ea754360ae840a [ 866.338684] env[61273]: INFO nova.scheduler.client.report [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Deleted allocations for instance 05df2575-9c3f-43d4-8fe4-52a808e11080 [ 866.350995] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg b7f87503c61c4115b194fe5325b743fe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 866.361735] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7f87503c61c4115b194fe5325b743fe [ 866.577899] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Updating instance_info_cache with network_info: [] {{(pid=61273) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.578449] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg ad2c947ce7d74f8780075adb4491ed8d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 866.587005] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad2c947ce7d74f8780075adb4491ed8d [ 866.739144] env[61273]: DEBUG nova.scheduler.client.report [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 866.741544] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 6c0a017cf0bd49abaaaf20bf269cec03 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 866.755398] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c0a017cf0bd49abaaaf20bf269cec03 [ 866.847145] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "05df2575-9c3f-43d4-8fe4-52a808e11080" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.087s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.847756] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 1b592b03f49d4716949e23fdd3672819 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 866.862959] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b592b03f49d4716949e23fdd3672819 [ 867.081144] env[61273]: INFO nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: ae9866e2-544a-4d26-b198-87110f42f054] Took 1.03 seconds to deallocate network for instance. 
[ 867.082850] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg d6fd467ba4fb48429847684a79696b35 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 867.121831] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6fd467ba4fb48429847684a79696b35 [ 867.243759] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.244406] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 867.246070] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg f922a2b616f14824895966fb9995f91c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 867.247090] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.528s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.247826] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 72224458758d4a3e92fd668693c41fcc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 867.270391] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72224458758d4a3e92fd668693c41fcc [ 867.276934] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f922a2b616f14824895966fb9995f91c [ 867.350485] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 867.352338] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg d301b37518fa422e8c1717b2b7f44342 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 867.397262] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d301b37518fa422e8c1717b2b7f44342 [ 867.587606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 5ea3b60489d141b3869e32253fd1c761 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 867.625080] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ea3b60489d141b3869e32253fd1c761 [ 867.751914] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg bf3838cc03f742cca5f9c721f8aafa66 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 867.753583] env[61273]: DEBUG nova.compute.utils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 867.754148] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 3127d57eba9f4e23955c505c82e0af1c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 867.755070] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 867.755227] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 867.762651] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf3838cc03f742cca5f9c721f8aafa66 [ 867.772448] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3127d57eba9f4e23955c505c82e0af1c [ 867.789801] env[61273]: DEBUG nova.policy [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f09e88112924400db81a4fbe611482f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70c9e6d7debd4d8e8cb7790975294a22', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 867.871782] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.093150] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Successfully created port: 7fc6dd31-acd8-4eb7-952a-365a4a2614e1 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.112490] env[61273]: INFO nova.scheduler.client.report [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Deleted allocations for instance ae9866e2-544a-4d26-b198-87110f42f054 [ 868.119326] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 094623b0fabb4010bbee7f806713adde in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 868.131567] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 094623b0fabb4010bbee7f806713adde [ 868.259563] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 868.261484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg c1ccbaf8bfe54c0d9eed961b69474971 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 868.274345] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance b720f9f1-9401-40b1-978b-9b8eefe712ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.274826] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 9952d347-2ca7-48f2-8ee1-dc1d767402dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.275063] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1fde207b-9d32-4cff-b3fe-d0caddd20f69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.275309] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 6182ea84-d5f2-4f01-9091-3d7b0b096d7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.275945] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f4f2ca62af51477d86edd8a039df0208 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 868.286212] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4f2ca62af51477d86edd8a039df0208 [ 868.295183] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1ccbaf8bfe54c0d9eed961b69474971 [ 868.622228] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "ae9866e2-544a-4d26-b198-87110f42f054" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.815s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.622228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg f633cc5e34e3463fb39ac11e8a04640f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 868.632111] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f633cc5e34e3463fb39ac11e8a04640f [ 868.769349] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 52fbcdaaa4e54a89ad16cee74f6054ff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 868.779866] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 27e43d79-6435-46fb-ac71-9be7313d591a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.779866] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 908445906b5546ef9c3775743f2460db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 868.788901] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 908445906b5546ef9c3775743f2460db [ 868.800684] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52fbcdaaa4e54a89ad16cee74f6054ff [ 868.824679] env[61273]: DEBUG nova.compute.manager [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Received event network-changed-7fc6dd31-acd8-4eb7-952a-365a4a2614e1 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 868.824679] env[61273]: DEBUG nova.compute.manager [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Refreshing instance network info cache due to event network-changed-7fc6dd31-acd8-4eb7-952a-365a4a2614e1. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 868.824679] env[61273]: DEBUG oslo_concurrency.lockutils [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] Acquiring lock "refresh_cache-6182ea84-d5f2-4f01-9091-3d7b0b096d7c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.824679] env[61273]: DEBUG oslo_concurrency.lockutils [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] Acquired lock "refresh_cache-6182ea84-d5f2-4f01-9091-3d7b0b096d7c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.824679] env[61273]: DEBUG nova.network.neutron [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Refreshing network info cache for port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 868.824851] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] Expecting reply to msg d62177c7afe1452e8e332b30d2b02680 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 868.830824] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d62177c7afe1452e8e332b30d2b02680 [ 869.017106] env[61273]: ERROR nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1, please check neutron logs for more information. 
[ 869.017106] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 869.017106] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 869.017106] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 869.017106] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 869.017106] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 869.017106] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 869.017106] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 869.017106] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 869.017106] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 869.017106] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 869.017106] env[61273]: ERROR nova.compute.manager raise self.value [ 869.017106] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 869.017106] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 869.017106] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 869.017106] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 869.017647] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 869.017647] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 869.017647] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1, please check neutron logs for more information. 
[ 869.017647] env[61273]: ERROR nova.compute.manager [ 869.017647] env[61273]: Traceback (most recent call last): [ 869.017647] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 869.017647] env[61273]: listener.cb(fileno) [ 869.017647] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 869.017647] env[61273]: result = function(*args, **kwargs) [ 869.017647] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 869.017647] env[61273]: return func(*args, **kwargs) [ 869.017647] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 869.017647] env[61273]: raise e [ 869.017647] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 869.017647] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 869.017647] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 869.017647] env[61273]: created_port_ids = self._update_ports_for_instance( [ 869.017647] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 869.017647] env[61273]: with excutils.save_and_reraise_exception(): [ 869.017647] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 869.017647] env[61273]: self.force_reraise() [ 869.017647] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 869.017647] env[61273]: raise self.value [ 869.017647] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 869.017647] env[61273]: updated_port = self._update_port( [ 869.017647] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 869.017647] env[61273]: _ensure_no_port_binding_failure(port) [ 869.017647] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 869.017647] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 869.018470] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1, please check neutron logs for more information. [ 869.018470] env[61273]: Removing descriptor: 15 [ 869.125027] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 869.126743] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg dea1b012fca94a229769e064c6cd1690 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 869.166084] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dea1b012fca94a229769e064c6cd1690 [ 869.271400] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 869.281373] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 9adae455-b609-4ecb-8841-43fb4d826f84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 869.282040] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg d8610092f8af487eb1539d68d43a1145 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 869.293165] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8610092f8af487eb1539d68d43a1145 [ 869.299466] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 869.299723] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 869.299882] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.300076] env[61273]: DEBUG nova.virt.hardware [None 
req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 869.300215] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.300358] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 869.300557] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 869.300707] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 869.300867] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 869.301024] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 869.301188] env[61273]: DEBUG nova.virt.hardware [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 869.302323] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c25e0cb-75de-4e95-b03f-a9c4e022eb09 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.310393] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2626b49-9efc-45a2-873f-8c48705a418f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.324739] env[61273]: ERROR nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed 
for port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1, please check neutron logs for more information. [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Traceback (most recent call last): [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] yield resources [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self.driver.spawn(context, instance, image_meta, [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] vm_ref = self.build_virtual_machine(instance, [ 869.324739] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] vif_infos = vmwarevif.get_vif_info(self._session, [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] for vif in network_info: [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] return self._sync_wrapper(fn, *args, **kwargs) [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self.wait() [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self[:] = self._gt.wait() [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] return self._exit_event.wait() [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 869.325220] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] current.throw(*self._exc) [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] result = function(*args, **kwargs) [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] return func(*args, **kwargs) [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] raise e [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] nwinfo = self.network_api.allocate_for_instance( [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] created_port_ids = self._update_ports_for_instance( [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] with excutils.save_and_reraise_exception(): [ 869.325791] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self.force_reraise() [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] raise self.value [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] updated_port = self._update_port( [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] _ensure_no_port_binding_failure(port) [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 
6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] raise exception.PortBindingFailed(port_id=port['id']) [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] nova.exception.PortBindingFailed: Binding failed for port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1, please check neutron logs for more information. [ 869.326229] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] [ 869.326229] env[61273]: INFO nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Terminating instance [ 869.328964] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "refresh_cache-6182ea84-d5f2-4f01-9091-3d7b0b096d7c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.349990] env[61273]: DEBUG nova.network.neutron [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 869.483412] env[61273]: DEBUG nova.network.neutron [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.483942] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] Expecting reply to msg dd4b24216708448fa25334a8c20367ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 869.492680] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd4b24216708448fa25334a8c20367ce [ 869.651984] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.785091] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 9cedc314-173e-4686-8ee5-28c2512dbcba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 869.785732] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 40acdcdbbfd441f58ffc0508f6cfd9cb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 869.800108] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40acdcdbbfd441f58ffc0508f6cfd9cb [ 869.986041] env[61273]: DEBUG oslo_concurrency.lockutils [req-b40a4ae2-0ae3-4c34-923c-c79b53b9c8f9 req-968c0631-7480-4aac-a3e7-29c6c580cede service nova] Releasing lock "refresh_cache-6182ea84-d5f2-4f01-9091-3d7b0b096d7c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.986452] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquired lock "refresh_cache-6182ea84-d5f2-4f01-9091-3d7b0b096d7c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.986641] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 869.987078] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 619c07fb45ad4cda9476228ae3386fc4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 869.997158] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 619c07fb45ad4cda9476228ae3386fc4 [ 870.288743] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 31ab5ebd-3df1-4e9f-bf53-69d47176da01 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 870.289310] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 2f977172ca444252972bb4f97c16f1fd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 870.299865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f977172ca444252972bb4f97c16f1fd [ 870.505091] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.583425] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.583951] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg bd730695588346d99424528a4f630d24 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 870.598058] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd730695588346d99424528a4f630d24 [ 870.791558] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 870.792216] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f15bcb925c47454ebeb66399627bae38 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 870.804702] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f15bcb925c47454ebeb66399627bae38 [ 870.853165] env[61273]: DEBUG nova.compute.manager [req-4df00c24-d5a8-4cfd-bdec-5016c95ae6c9 req-c30c7115-3f11-4342-b704-e949b9e23cf8 service nova] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Received event network-vif-deleted-7fc6dd31-acd8-4eb7-952a-365a4a2614e1 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 871.089809] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Releasing lock "refresh_cache-6182ea84-d5f2-4f01-9091-3d7b0b096d7c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.090270] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 871.090467] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 871.090798] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36b5cbdc-0818-4d1b-bd62-8d2826e0ce35 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.099713] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2c281e-c03a-4ad2-8d12-cf07c7e951ad {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.122897] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6182ea84-d5f2-4f01-9091-3d7b0b096d7c could not be found. [ 871.123853] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 871.124121] env[61273]: INFO nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 871.124503] env[61273]: DEBUG oslo.service.loopingcall [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.124616] env[61273]: DEBUG nova.compute.manager [-] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 871.124716] env[61273]: DEBUG nova.network.neutron [-] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 871.144985] env[61273]: DEBUG nova.network.neutron [-] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 871.146187] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 839fc45c70be4bd0aaf712220d7b9429 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 871.155280] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 839fc45c70be4bd0aaf712220d7b9429 [ 871.294348] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance d3dafd33-91f8-481d-8f40-8c2e98a7587d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 871.294876] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 7b2925a560ea409fa4c537a4a1bd04a9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 871.307288] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b2925a560ea409fa4c537a4a1bd04a9 [ 871.647722] env[61273]: DEBUG nova.network.neutron [-] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.648218] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 82daa6ce27bc4952bf3ae5df13d7402b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 871.685227] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82daa6ce27bc4952bf3ae5df13d7402b [ 871.796934] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance f9e23014-2fe0-4aab-b03c-8759dc1e5eb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 871.797553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg e2b086d5221e4a4ca71ffb28e1fa7804 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 871.808222] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2b086d5221e4a4ca71ffb28e1fa7804 [ 872.150091] env[61273]: INFO nova.compute.manager [-] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Took 1.03 seconds to deallocate network for instance. 
[ 872.152505] env[61273]: DEBUG nova.compute.claims [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 872.152698] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.299959] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance d4a2025d-c128-45a2-b74c-a7fd2630d615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 872.300553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg c616eb267a62446aa78e25705e00cde7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 872.310731] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c616eb267a62446aa78e25705e00cde7 [ 872.802792] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 872.803103] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 872.803197] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 872.976844] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3ea85d-a8d2-41b1-889a-71f4e1920702 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.985431] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79cea7cf-fd31-41ab-9bd8-bdb764acd277 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.014509] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50be103a-96c7-4182-a592-2d69c0a344e1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.021909] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f28068a-f51f-49ce-ac5d-9680a19b0701 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.034740] env[61273]: DEBUG nova.compute.provider_tree [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.035199] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg f353a36dc61e480680399f7e73a55a58 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 873.043041] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f353a36dc61e480680399f7e73a55a58 [ 873.537756] env[61273]: DEBUG nova.scheduler.client.report [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 873.540052] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 91898847997d40c79ceef749bbac8e87 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 873.553768] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91898847997d40c79ceef749bbac8e87 [ 874.042692] 
env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61273) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 874.043058] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.796s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.043253] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.323s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.044971] env[61273]: INFO nova.compute.claims [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 874.046621] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 1b5bf0d39b6d48619e29f277f2308327 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 874.084606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b5bf0d39b6d48619e29f277f2308327 [ 874.550307] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 2d1909c3b2a14e0e92ecbcb059c7b4ac in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 874.558002] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d1909c3b2a14e0e92ecbcb059c7b4ac [ 875.194549] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce55e59d-fc91-4d4e-b604-26fa98cc3d92 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.201648] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc144be8-367d-4905-b908-4bd188ad6be9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.230836] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1c4226-96ef-467e-8ba4-ffabe22bb746 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.237471] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815931b9-569e-4517-9970-9198d1d42c8e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.249991] env[61273]: DEBUG nova.compute.provider_tree [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for 
provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.250480] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 50923ea5cd7a43119c73a79cf403b9da in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 875.257120] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50923ea5cd7a43119c73a79cf403b9da [ 875.753750] env[61273]: DEBUG nova.scheduler.client.report [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.756131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 01ecef0ffebc4fffb36efbc78546bcac in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 875.766661] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01ecef0ffebc4fffb36efbc78546bcac [ 876.259310] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.216s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.259863] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 876.261542] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 2ec8b6e59e2747d980316c84dcc35f64 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 876.262636] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.880s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.264352] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 9600020a15484095bee59e117b887204 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 876.291988] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ec8b6e59e2747d980316c84dcc35f64 [ 876.292945] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9600020a15484095bee59e117b887204 [ 876.768024] env[61273]: DEBUG nova.compute.utils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 876.768782] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 68a6e6459e4740ccb6e0fbb2558fcee6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 876.773564] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 876.773743] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 876.781524] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68a6e6459e4740ccb6e0fbb2558fcee6 [ 876.819157] env[61273]: DEBUG nova.policy [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af34c4e3d81c4729a9dd4a8531992ff1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9325f1def284d2a9fdced4e9eeb17f0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 876.924245] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5d2982-21cc-4b33-9c2a-38191023ab48 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.931834] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c535a83-35e3-4f47-af5e-d6e478076345 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.962942] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4e918b-79e4-4cab-9d95-eac9768090b9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.970430] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd309a9-eeb6-4ce8-8f9d-f20d8debb0fc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.990952] env[61273]: DEBUG nova.compute.provider_tree [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.991476] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg c4392278fc8547e583ba964191deca4b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 876.999356] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4392278fc8547e583ba964191deca4b [ 877.073324] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Successfully created port: 038957b1-b320-43d4-bfde-f11c360acfb3 {{(pid=61273) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.274604] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 877.276329] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg c93f839c4e874e269275697657c0f231 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 877.306157] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c93f839c4e874e269275697657c0f231 [ 877.494140] env[61273]: DEBUG nova.scheduler.client.report [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 877.496439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 07f10d510af6478eb7615614c4807853 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 877.508582] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07f10d510af6478eb7615614c4807853 [ 877.648439] env[61273]: DEBUG nova.compute.manager [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Received event network-changed-038957b1-b320-43d4-bfde-f11c360acfb3 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 877.648634] env[61273]: DEBUG nova.compute.manager [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Refreshing instance network info cache due to event network-changed-038957b1-b320-43d4-bfde-f11c360acfb3. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 877.648861] env[61273]: DEBUG oslo_concurrency.lockutils [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] Acquiring lock "refresh_cache-27e43d79-6435-46fb-ac71-9be7313d591a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.649041] env[61273]: DEBUG oslo_concurrency.lockutils [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] Acquired lock "refresh_cache-27e43d79-6435-46fb-ac71-9be7313d591a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.649213] env[61273]: DEBUG nova.network.neutron [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Refreshing network info cache for port 038957b1-b320-43d4-bfde-f11c360acfb3 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 877.652376] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] Expecting reply to msg 541ebb85a2fa4623b5f7714b1febffa5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 877.656793] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 541ebb85a2fa4623b5f7714b1febffa5 [ 877.781347] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 1e0f6c90a5eb4228b72d8dfc97d7e028 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 877.825279] env[61273]: ERROR nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 038957b1-b320-43d4-bfde-f11c360acfb3, please check neutron logs for more information. 
[ 877.825279] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 877.825279] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 877.825279] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 877.825279] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 877.825279] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 877.825279] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 877.825279] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 877.825279] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 877.825279] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 877.825279] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 877.825279] env[61273]: ERROR nova.compute.manager raise self.value [ 877.825279] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 877.825279] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 877.825279] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 877.825279] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 877.825920] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 877.825920] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 877.825920] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 038957b1-b320-43d4-bfde-f11c360acfb3, please check neutron logs for more information. 
[ 877.825920] env[61273]: ERROR nova.compute.manager [ 877.825920] env[61273]: Traceback (most recent call last): [ 877.825920] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 877.825920] env[61273]: listener.cb(fileno) [ 877.825920] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 877.825920] env[61273]: result = function(*args, **kwargs) [ 877.825920] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 877.825920] env[61273]: return func(*args, **kwargs) [ 877.825920] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 877.825920] env[61273]: raise e [ 877.825920] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 877.825920] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 877.825920] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 877.825920] env[61273]: created_port_ids = self._update_ports_for_instance( [ 877.825920] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 877.825920] env[61273]: with excutils.save_and_reraise_exception(): [ 877.825920] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 877.825920] env[61273]: self.force_reraise() [ 877.825920] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 877.825920] env[61273]: raise self.value [ 877.825920] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 877.825920] env[61273]: updated_port = self._update_port( [ 877.825920] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 877.825920] env[61273]: _ensure_no_port_binding_failure(port) [ 877.825920] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 877.825920] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 877.826697] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 038957b1-b320-43d4-bfde-f11c360acfb3, please check neutron logs for more information. [ 877.826697] env[61273]: Removing descriptor: 15 [ 877.826697] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e0f6c90a5eb4228b72d8dfc97d7e028 [ 877.999263] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.736s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.000154] env[61273]: ERROR nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6, please check neutron logs for more information. 
[ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Traceback (most recent call last): [ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self.driver.spawn(context, instance, image_meta, [ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self._vmops.spawn(context, instance, image_meta, injected_files, [ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] vm_ref = self.build_virtual_machine(instance, [ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] vif_infos = vmwarevif.get_vif_info(self._session, [ 878.000154] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] for vif in network_info: [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] return self._sync_wrapper(fn, *args, **kwargs) [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self.wait() [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self[:] = self._gt.wait() [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] return self._exit_event.wait() [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] current.throw(*self._exc) [ 878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
878.000496] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] result = function(*args, **kwargs) [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] return func(*args, **kwargs) [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] raise e [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] nwinfo = self.network_api.allocate_for_instance( [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] created_port_ids = self._update_ports_for_instance( [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] with excutils.save_and_reraise_exception(): [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] self.force_reraise() [ 878.000853] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.001187] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] raise self.value [ 878.001187] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 878.001187] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] updated_port = self._update_port( [ 878.001187] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.001187] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] _ensure_no_port_binding_failure(port) [ 878.001187] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 878.001187] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] raise exception.PortBindingFailed(port_id=port['id']) [ 878.001187] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] nova.exception.PortBindingFailed: Binding failed for 
port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6, please check neutron logs for more information. [ 878.001187] env[61273]: ERROR nova.compute.manager [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] [ 878.001987] env[61273]: DEBUG nova.compute.utils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Binding failed for port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 878.003500] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.600s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.005389] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg ccee42a9f4e74184a9d262ca8bf5e012 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 878.017383] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Build of instance b720f9f1-9401-40b1-978b-9b8eefe712ea was re-scheduled: Binding failed for port f0f1a201-d4ee-4abf-a2c8-f33f9a7b67f6, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 878.020157] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 878.020480] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquiring lock "refresh_cache-b720f9f1-9401-40b1-978b-9b8eefe712ea" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.020690] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Acquired lock "refresh_cache-b720f9f1-9401-40b1-978b-9b8eefe712ea" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.020900] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 878.021426] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 9aeeb36dc1ed4d6f85ffca7074da1c72 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 878.028556] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9aeeb36dc1ed4d6f85ffca7074da1c72 [ 878.040483] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ccee42a9f4e74184a9d262ca8bf5e012 [ 878.166261] env[61273]: DEBUG nova.network.neutron [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 878.243947] env[61273]: DEBUG nova.network.neutron [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.244648] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] Expecting reply to msg 9aca03c889d54d8d9e4e2420393b74ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 878.253006] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9aca03c889d54d8d9e4e2420393b74ce [ 878.284434] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 878.309622] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=<?>,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-08-02T13:31:32Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 878.309947] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 878.310177] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.310429] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 878.310639] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.310856] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 
tempest-ServersTestJSON-462811407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 878.311119] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 878.311341] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 878.311571] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 878.311834] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 878.312085] env[61273]: DEBUG nova.virt.hardware [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 878.313103] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18fe830-b18f-4293-922d-e4cfe2a783a8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.321508] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f470d2-21c7-4523-a4b5-9c802db0d398 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.335424] env[61273]: ERROR nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 038957b1-b320-43d4-bfde-f11c360acfb3, please check neutron logs for more information. 
[ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Traceback (most recent call last): [ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] yield resources [ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self.driver.spawn(context, instance, image_meta, [ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] vm_ref = self.build_virtual_machine(instance, [ 878.335424] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] vif_infos = vmwarevif.get_vif_info(self._session, [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] for vif in network_info: [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] return self._sync_wrapper(fn, *args, **kwargs) [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self.wait() [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self[:] = self._gt.wait() [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] return self._exit_event.wait() [ 878.335843] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 878.335843] env[61273]: ERROR 
nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] current.throw(*self._exc) [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] result = function(*args, **kwargs) [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] return func(*args, **kwargs) [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] raise e [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] nwinfo = self.network_api.allocate_for_instance( [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] created_port_ids = self._update_ports_for_instance( [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] with excutils.save_and_reraise_exception(): [ 878.336262] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self.force_reraise() [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] raise self.value [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] updated_port = self._update_port( [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] _ensure_no_port_binding_failure(port) [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] raise exception.PortBindingFailed(port_id=port['id']) [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] nova.exception.PortBindingFailed: Binding failed for port 038957b1-b320-43d4-bfde-f11c360acfb3, please check neutron logs for more information. [ 878.336649] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] [ 878.336649] env[61273]: INFO nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Terminating instance [ 878.338667] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-27e43d79-6435-46fb-ac71-9be7313d591a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.541191] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 878.653688] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.653688] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 5c825cda449643dab98d2fb9dd66c6e8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 878.662783] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c825cda449643dab98d2fb9dd66c6e8 [ 878.705723] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca19ee9-5a88-4525-9fbb-df673dd2315d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.713837] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4793c65e-9215-4425-9a60-1a8a7e59e06f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.742764] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfcead4-e9b8-4341-87c3-3595af790678 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.747047] env[61273]: DEBUG oslo_concurrency.lockutils [req-e47adf02-7302-4fb6-824b-1179c477ff1a req-bd5b96dc-8c2f-4bda-ab07-19ae44d30906 service nova] Releasing lock "refresh_cache-27e43d79-6435-46fb-ac71-9be7313d591a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.747559] env[61273]: DEBUG 
oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-27e43d79-6435-46fb-ac71-9be7313d591a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.747861] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 878.748420] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg bd8a4241bb8c4744894294248511d37c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 878.752121] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcfdd81-94d3-4145-aa85-2ab76d69c076 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.756322] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd8a4241bb8c4744894294248511d37c [ 878.766247] env[61273]: DEBUG nova.compute.provider_tree [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.766848] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 8443f1ce0c014ecc9e007a6ba3bf5933 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 878.773303] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8443f1ce0c014ecc9e007a6ba3bf5933 [ 879.159060] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Releasing lock "refresh_cache-b720f9f1-9401-40b1-978b-9b8eefe712ea" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.159060] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 879.159060] env[61273]: DEBUG nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 879.159060] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 879.175431] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 879.175431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 6bc83d7f5e774345ac098e127ef20e85 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 879.182299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bc83d7f5e774345ac098e127ef20e85 [ 879.268237] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 879.270218] env[61273]: DEBUG nova.scheduler.client.report [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 879.272841] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg a0e3a68fb59a48c9b04706af37c790f5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 879.283972] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0e3a68fb59a48c9b04706af37c790f5 [ 879.334577] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.335475] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg a7328a60eba94f6f9e655f542563ae74 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 879.343583] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7328a60eba94f6f9e655f542563ae74 [ 879.680511] env[61273]: DEBUG nova.network.neutron [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.680511] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 1effd2c3c4ae4b569293f68c56ca3fb6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 879.680511] env[61273]: DEBUG nova.compute.manager [req-21e180ed-3bd3-4e88-a743-5a81a8a7b004 req-6a429a2a-9231-4dd0-8d98-b18e7a32adf3 service nova] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Received event network-vif-deleted-038957b1-b320-43d4-bfde-f11c360acfb3 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 879.686666] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1effd2c3c4ae4b569293f68c56ca3fb6 [ 879.775384] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.771s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.775603] env[61273]: ERROR nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 94ad734d-164b-4d36-83a3-e69aa8777538, please check neutron logs for more information. [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Traceback (most recent call last): [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self.driver.spawn(context, instance, image_meta, [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] vm_ref = self.build_virtual_machine(instance, [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 879.775603] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] for vif in network_info: [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] return self._sync_wrapper(fn, *args, **kwargs) [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self.wait() [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self[:] = self._gt.wait() [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 879.775943] env[61273]: ERROR 
nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] return self._exit_event.wait() [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] current.throw(*self._exc) [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 879.775943] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] result = function(*args, **kwargs) [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] return func(*args, **kwargs) [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] raise e [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] nwinfo = self.network_api.allocate_for_instance( [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] created_port_ids = self._update_ports_for_instance( [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] with excutils.save_and_reraise_exception(): [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] self.force_reraise() [ 879.776433] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 879.776745] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] raise self.value [ 879.776745] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 879.776745] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] updated_port = self._update_port( [ 879.776745] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 879.776745] 
env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] _ensure_no_port_binding_failure(port) [ 879.776745] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 879.776745] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] raise exception.PortBindingFailed(port_id=port['id']) [ 879.776745] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] nova.exception.PortBindingFailed: Binding failed for port 94ad734d-164b-4d36-83a3-e69aa8777538, please check neutron logs for more information. [ 879.776745] env[61273]: ERROR nova.compute.manager [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] [ 879.776745] env[61273]: DEBUG nova.compute.utils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Binding failed for port 94ad734d-164b-4d36-83a3-e69aa8777538, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 879.777642] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.601s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.779296] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg b411931c725d4c2086328e06ba72a52c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 879.780444] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Build of instance 9952d347-2ca7-48f2-8ee1-dc1d767402dc was re-scheduled: Binding failed for port 94ad734d-164b-4d36-83a3-e69aa8777538, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 879.780852] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 879.781071] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-9952d347-2ca7-48f2-8ee1-dc1d767402dc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.781287] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-9952d347-2ca7-48f2-8ee1-dc1d767402dc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.781457] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 879.781944] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 83f6bee6f1ca4203bd00b3c191e97a54 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 879.788617] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83f6bee6f1ca4203bd00b3c191e97a54 [ 879.808746] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b411931c725d4c2086328e06ba72a52c [ 879.837310] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-27e43d79-6435-46fb-ac71-9be7313d591a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.837740] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 879.837977] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 879.838297] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fce7e2e3-1c7c-47ff-a3b8-56ba6dd0267f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.847056] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9fba7a-2945-4e1b-9892-9d57b4fe82ea {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.868034] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 27e43d79-6435-46fb-ac71-9be7313d591a could not be found. [ 879.868305] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 879.868553] env[61273]: INFO nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 879.868837] env[61273]: DEBUG oslo.service.loopingcall [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.869100] env[61273]: DEBUG nova.compute.manager [-] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 879.869222] env[61273]: DEBUG nova.network.neutron [-] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 879.884574] env[61273]: DEBUG nova.network.neutron [-] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 879.885027] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2d32e169914b478aa3f2cd19e38afa71 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 879.891300] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d32e169914b478aa3f2cd19e38afa71 [ 880.180873] env[61273]: INFO nova.compute.manager [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] [instance: b720f9f1-9401-40b1-978b-9b8eefe712ea] Took 1.02 seconds to deallocate network for instance. [ 880.183707] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 49c06508570144608316102be7636fbf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 880.215545] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49c06508570144608316102be7636fbf [ 880.303997] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 880.366496] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.367013] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg fbdcfa9ce4094c87af36b703d229afd0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 880.375514] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbdcfa9ce4094c87af36b703d229afd0 [ 880.386606] env[61273]: DEBUG nova.network.neutron [-] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.386994] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4efd2628ed114ba098884158992ae1a9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 880.394811] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4efd2628ed114ba098884158992ae1a9 [ 880.434373] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2acb74d-b20a-45bb-abaa-30f0046f6066 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.441783] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ace1e6-8080-41cc-a979-2fdb710e8aec {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.471419] env[61273]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a092f1c0-724c-49cf-8fb3-262fa67c7a42 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.478139] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99af830d-4948-4c1f-86ed-5eb12b8642d4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.490851] env[61273]: DEBUG nova.compute.provider_tree [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.491319] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 07ca89ec8288442a97a3df8169612dc2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 880.498201] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07ca89ec8288442a97a3df8169612dc2 [ 880.691345] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg 45fe8a68d8ec4deb908ae6d1eee1d185 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 880.724324] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45fe8a68d8ec4deb908ae6d1eee1d185 [ 880.869206] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-9952d347-2ca7-48f2-8ee1-dc1d767402dc" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.869449] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 880.869726] env[61273]: DEBUG nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 880.869916] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 880.885380] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 880.886109] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 37eba001863544e3898d6e74b1e3dbfd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 880.889001] env[61273]: INFO nova.compute.manager [-] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Took 1.02 seconds to deallocate network for instance. [ 880.891817] env[61273]: DEBUG nova.compute.claims [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 880.891817] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.892770] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37eba001863544e3898d6e74b1e3dbfd [ 880.994055] env[61273]: DEBUG nova.scheduler.client.report [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.996617] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 7d4d055069ba42f88100191c48df3883 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 881.009794] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d4d055069ba42f88100191c48df3883 [ 881.214080] env[61273]: INFO nova.scheduler.client.report [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Deleted allocations for instance b720f9f1-9401-40b1-978b-9b8eefe712ea [ 881.221123] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Expecting reply to msg a94e513fa3824a1b839f78789dd0a994 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 881.234464] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a94e513fa3824a1b839f78789dd0a994 [ 881.388561] env[61273]: DEBUG nova.network.neutron [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] 
[instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.389095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 635b45af11a94f00a9e4f49b220a86ff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 881.396553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 635b45af11a94f00a9e4f49b220a86ff [ 881.498803] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.721s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.499383] env[61273]: ERROR nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 700bf580-c47d-4fd9-8067-c5c7564d83b0, please check neutron logs for more information. [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Traceback (most recent call last): [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] self.driver.spawn(context, instance, image_meta, [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] self._vmops.spawn(context, instance, image_meta, injected_files, [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] vm_ref = self.build_virtual_machine(instance, [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] vif_infos = vmwarevif.get_vif_info(self._session, [ 881.499383] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] for vif in network_info: [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 
1fde207b-9d32-4cff-b3fe-d0caddd20f69] return self._sync_wrapper(fn, *args, **kwargs) [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] self.wait() [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] self[:] = self._gt.wait() [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] return self._exit_event.wait() [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] current.throw(*self._exc) [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 881.499767] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] result = function(*args, **kwargs) [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] return func(*args, **kwargs) [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] raise e [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] nwinfo = self.network_api.allocate_for_instance( [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] created_port_ids = self._update_ports_for_instance( [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] with excutils.save_and_reraise_exception(): [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 
1fde207b-9d32-4cff-b3fe-d0caddd20f69] self.force_reraise() [ 881.500160] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 881.500529] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] raise self.value [ 881.500529] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 881.500529] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] updated_port = self._update_port( [ 881.500529] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 881.500529] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] _ensure_no_port_binding_failure(port) [ 881.500529] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 881.500529] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] raise exception.PortBindingFailed(port_id=port['id']) [ 881.500529] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] nova.exception.PortBindingFailed: Binding failed for port 700bf580-c47d-4fd9-8067-c5c7564d83b0, please check neutron logs for more information. [ 881.500529] env[61273]: ERROR nova.compute.manager [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] [ 881.500529] env[61273]: DEBUG nova.compute.utils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Binding failed for port 700bf580-c47d-4fd9-8067-c5c7564d83b0, please check neutron logs for more information. 
{{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 881.501247] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.011s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.502717] env[61273]: INFO nova.compute.claims [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.504232] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg b64fb53204c14da2b16d585f5701dcba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 881.505366] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Build of instance 1fde207b-9d32-4cff-b3fe-d0caddd20f69 was re-scheduled: Binding failed for port 700bf580-c47d-4fd9-8067-c5c7564d83b0, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 881.505775] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 881.505996] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "refresh_cache-1fde207b-9d32-4cff-b3fe-d0caddd20f69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.506141] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquired lock "refresh_cache-1fde207b-9d32-4cff-b3fe-d0caddd20f69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.506300] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 881.506653] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg d8f00e41ba254b07baf0913a3767b3f8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 881.512257] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg d8f00e41ba254b07baf0913a3767b3f8 [ 881.536027] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b64fb53204c14da2b16d585f5701dcba [ 881.723526] env[61273]: DEBUG oslo_concurrency.lockutils [None req-959d6159-e58d-4679-8e3f-1badec2bc5fb tempest-ListServersNegativeTestJSON-1980913997 tempest-ListServersNegativeTestJSON-1980913997-project-member] Lock "b720f9f1-9401-40b1-978b-9b8eefe712ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.797s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.724166] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg b16391cf6aab48a0aa062b39978227be in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 881.733594] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b16391cf6aab48a0aa062b39978227be [ 881.891784] env[61273]: INFO nova.compute.manager [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 9952d347-2ca7-48f2-8ee1-dc1d767402dc] Took 1.02 seconds to deallocate network for instance. [ 881.893465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg b2a2662920b34b6ca83e83988dcb617a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 881.923240] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2a2662920b34b6ca83e83988dcb617a [ 882.009786] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 9bf0a97c3bd24ca2ac8443c51b4bdf0f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 882.017725] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bf0a97c3bd24ca2ac8443c51b4bdf0f [ 882.025671] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 882.107472] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.107973] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg de8a4689ecf64237a31462811189a863 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 882.116258] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de8a4689ecf64237a31462811189a863 [ 882.226208] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 882.228112] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 004ecc7608c046a6bbd0c8e96cce0f1b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 882.261840] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 004ecc7608c046a6bbd0c8e96cce0f1b [ 882.397894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 7612be002d704972879f7d4b68d83de5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 882.428973] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7612be002d704972879f7d4b68d83de5 [ 882.609803] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Releasing lock "refresh_cache-1fde207b-9d32-4cff-b3fe-d0caddd20f69" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.611217] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 882.611217] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 882.611217] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 882.627287] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 882.627861] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 7cb6d767f8704c05bcb7d2b9a0e86b88 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 882.637250] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cb6d767f8704c05bcb7d2b9a0e86b88 [ 882.648700] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4784edf-22fc-433c-8579-c8a655388914 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.656757] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46cc1152-90c7-41ba-be7c-672a0a4d314d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.686455] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6bc62a-483c-48e0-b7a7-1f0fac3651f2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.693529] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681d6282-f4b7-4e98-b3d8-de027b0c705e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.706238] env[61273]: DEBUG nova.compute.provider_tree [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.706851] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg f177f38136204747b77258749de2c779 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 882.713900] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f177f38136204747b77258749de2c779 [ 
882.744468] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.921904] env[61273]: INFO nova.scheduler.client.report [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Deleted allocations for instance 9952d347-2ca7-48f2-8ee1-dc1d767402dc [ 882.929696] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 0a254707d04e483a9c2cf8b9bb123268 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 882.941156] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a254707d04e483a9c2cf8b9bb123268 [ 883.130120] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.130667] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 38404c8fa52b401cb03ebb9860e8aa36 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 883.141122] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38404c8fa52b401cb03ebb9860e8aa36 [ 883.209202] env[61273]: DEBUG nova.scheduler.client.report [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 883.211590] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 57b8db172e6140a598f3a050c5a87ae1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 883.230586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57b8db172e6140a598f3a050c5a87ae1 [ 883.431997] env[61273]: DEBUG oslo_concurrency.lockutils [None req-87c9ed44-1ef0-4cc4-9e27-929d7440ed48 tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "9952d347-2ca7-48f2-8ee1-dc1d767402dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.345s 
{{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.432683] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 2759ac17a2634d3b86b37ef38c8354cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 883.442259] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2759ac17a2634d3b86b37ef38c8354cc [ 883.634171] env[61273]: INFO nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 1fde207b-9d32-4cff-b3fe-d0caddd20f69] Took 1.02 seconds to deallocate network for instance. [ 883.635902] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 5e3383d3574746e585666c62e9a383fe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 883.676458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e3383d3574746e585666c62e9a383fe [ 883.714786] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.213s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.715322] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 883.717125] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 6333d1815adc4383a4a8167af1d26284 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 883.718386] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.468s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.719872] env[61273]: INFO nova.compute.claims [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.721343] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg a3a0ad98eec64c16b3c645b781cb528b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 883.748505] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6333d1815adc4383a4a8167af1d26284 [ 883.754464] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3a0ad98eec64c16b3c645b781cb528b [ 883.934814] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 883.936736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg a6ea3172c0964db0b40a72786ba17e3d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 883.976511] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6ea3172c0964db0b40a72786ba17e3d [ 884.141450] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg d73152bc064b4368a9b5792d1f9d067c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 884.180395] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d73152bc064b4368a9b5792d1f9d067c [ 884.224843] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 8814b1badcfb43118597b8e6ceb93d36 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 884.228536] env[61273]: DEBUG nova.compute.utils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 884.228536] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 182c0c998a8041b8b62ce55f216e3408 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 884.228536] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 884.228536] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 884.235899] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8814b1badcfb43118597b8e6ceb93d36 [ 884.239927] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 182c0c998a8041b8b62ce55f216e3408 [ 884.276395] env[61273]: DEBUG nova.policy [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2aafa17161df48feb736cef1e2ab52fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6946d58a1af45eb96597fef8e0c62f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 884.456286] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.650607] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Successfully created port: 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 884.666730] env[61273]: INFO nova.scheduler.client.report [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Deleted allocations for instance 1fde207b-9d32-4cff-b3fe-d0caddd20f69 [ 884.672908] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 7619191c6a6649078090dbac5157f8e4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 884.693084] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7619191c6a6649078090dbac5157f8e4 [ 884.733556] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 884.735374] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 9d71aaedb4cc4964b204be46f0242d07 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 884.787710] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d71aaedb4cc4964b204be46f0242d07 [ 884.898910] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e6ab70-1e6e-49a1-8553-22c752e16f0e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.906460] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827402ed-3d95-4904-96bd-a740d77c0341 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.945287] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ef6148-6a30-4c39-839d-56e9f5de5410 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.958657] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc5d3c8-d838-4174-b6a2-2640cfe27d7e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.972047] env[61273]: DEBUG nova.compute.provider_tree [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.972764] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg e900f74c200b473793908d8e652d49cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 884.980766] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e900f74c200b473793908d8e652d49cc [ 885.011946] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Successfully created port: 0828ce28-52c7-4741-9ffe-d44c410f1a88 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 885.180205] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "1fde207b-9d32-4cff-b3fe-d0caddd20f69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.725s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.180812] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 
4db2353427e74cf0b96fb537844938d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 885.190996] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4db2353427e74cf0b96fb537844938d3 [ 885.239769] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg ff563a55be35463fa4c83fe40a9da0ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 885.306155] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff563a55be35463fa4c83fe40a9da0ce [ 885.389211] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "a70f220a-fa34-44af-939f-29292b556897" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.389453] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "a70f220a-fa34-44af-939f-29292b556897" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.475116] env[61273]: DEBUG nova.scheduler.client.report [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 885.483384] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 8c7da04cc20c4d8ba2c02ff848af8dce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 885.489498] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c7da04cc20c4d8ba2c02ff848af8dce [ 885.686307] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 885.686307] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg b4727e5e18964f8a963d0544b8970282 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 885.719689] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4727e5e18964f8a963d0544b8970282 [ 885.744682] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 885.773651] env[61273]: DEBUG nova.compute.manager [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Received event network-changed-4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 885.773651] env[61273]: DEBUG nova.compute.manager [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Refreshing instance network info cache due to event network-changed-4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 885.773651] env[61273]: DEBUG oslo_concurrency.lockutils [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] Acquiring lock "refresh_cache-9adae455-b609-4ecb-8841-43fb4d826f84" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.773651] env[61273]: DEBUG oslo_concurrency.lockutils [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] Acquired lock "refresh_cache-9adae455-b609-4ecb-8841-43fb4d826f84" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.773651] env[61273]: DEBUG nova.network.neutron [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Refreshing network info cache for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 885.774145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] Expecting reply to msg 5511c7f0413f45bfa28ed0c17364fc9c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 885.775135] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5511c7f0413f45bfa28ed0c17364fc9c [ 885.781748] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 885.782138] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 885.782403] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.782687] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 885.782941] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.783211] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 885.783522] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 885.783783] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 885.784082] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 885.784355] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a 
tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 885.784671] env[61273]: DEBUG nova.virt.hardware [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 885.786192] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a295e8dc-cbb3-4048-bca3-ca9db1baaef0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.796961] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2305d19-c3e9-46e7-8058-f7d7672481b0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.831614] env[61273]: DEBUG nova.network.neutron [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 885.932955] env[61273]: ERROR nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac, please check neutron logs for more information. 
[ 885.932955] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 885.932955] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.932955] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 885.932955] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 885.932955] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 885.932955] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 885.932955] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 885.932955] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.932955] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 885.932955] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.932955] env[61273]: ERROR nova.compute.manager raise self.value [ 885.932955] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 885.932955] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 885.932955] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.932955] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 885.933490] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 885.933490] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 885.933490] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac, please check neutron logs for more information. 
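[editor's annotation] The PortBindingFailed tracebacks above all terminate in _ensure_no_port_binding_failure at nova/network/neutron.py:294. For illustration only, the check behind that failure can be sketched roughly as below; this is a paraphrase inferred from the traceback, not code copied from the tree under test, and it assumes the usual convention that Neutron reports a failed binding by setting the port's binding:vif_type to 'binding_failed'.

    # Paraphrased sketch of the check referenced in the traceback above
    # (nova/network/neutron.py, _ensure_no_port_binding_failure). Assumption:
    # Neutron signals a failed binding via binding:vif_type == 'binding_failed'.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # If Neutron could not bind the port (no mechanism driver claimed it,
        # agent down, wrong network segment, ...), refuse to continue spawning
        # the instance and surface the failure to the compute manager.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

In the log this exception propagates out of _allocate_network_async, which is why the instance (9adae455-b609-4ecb-8841-43fb4d826f84) is terminated and its network deallocated a few entries later.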
[ 885.933490] env[61273]: ERROR nova.compute.manager [ 885.933859] env[61273]: Traceback (most recent call last): [ 885.933968] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 885.933968] env[61273]: listener.cb(fileno) [ 885.934042] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 885.934042] env[61273]: result = function(*args, **kwargs) [ 885.934188] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 885.934188] env[61273]: return func(*args, **kwargs) [ 885.934261] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 885.934261] env[61273]: raise e [ 885.934331] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.934331] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 885.934400] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 885.934400] env[61273]: created_port_ids = self._update_ports_for_instance( [ 885.934473] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 885.934473] env[61273]: with excutils.save_and_reraise_exception(): [ 885.934548] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.934548] env[61273]: self.force_reraise() [ 885.934618] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.934618] env[61273]: raise self.value [ 885.934689] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 885.934689] env[61273]: updated_port = self._update_port( [ 885.934760] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.934760] env[61273]: _ensure_no_port_binding_failure(port) [ 885.934829] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 885.934829] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 885.934912] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac, please check neutron logs for more information. [ 885.934963] env[61273]: Removing descriptor: 15 [ 885.935770] env[61273]: ERROR nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac, please check neutron logs for more information. 
[ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Traceback (most recent call last): [ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] yield resources [ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self.driver.spawn(context, instance, image_meta, [ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self._vmops.spawn(context, instance, image_meta, injected_files, [ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] vm_ref = self.build_virtual_machine(instance, [ 885.935770] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] vif_infos = vmwarevif.get_vif_info(self._session, [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] for vif in network_info: [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] return self._sync_wrapper(fn, *args, **kwargs) [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self.wait() [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self[:] = self._gt.wait() [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] return self._exit_event.wait() [ 885.936171] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 885.936171] env[61273]: ERROR 
nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] result = hub.switch() [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] return self.greenlet.switch() [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] result = function(*args, **kwargs) [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] return func(*args, **kwargs) [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] raise e [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] nwinfo = self.network_api.allocate_for_instance( [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] created_port_ids = self._update_ports_for_instance( [ 885.936551] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] with excutils.save_and_reraise_exception(): [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self.force_reraise() [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] raise self.value [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] updated_port = self._update_port( [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 885.936941] 
env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] _ensure_no_port_binding_failure(port) [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] raise exception.PortBindingFailed(port_id=port['id']) [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] nova.exception.PortBindingFailed: Binding failed for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac, please check neutron logs for more information. [ 885.936941] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] [ 885.937734] env[61273]: INFO nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Terminating instance [ 885.939999] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "refresh_cache-9adae455-b609-4ecb-8841-43fb4d826f84" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.980572] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.981585] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 885.983867] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg fa60babe9f0f438eb302d45c3b0b7581 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 885.986418] env[61273]: DEBUG nova.network.neutron [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.987267] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] Expecting reply to msg 9532e198ee8b42daa184c62b10080959 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 885.988517] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.943s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.990539] env[61273]: INFO nova.compute.claims [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.000047] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 61ae6c70fba847958ba4b68b96ae2bbb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 886.004484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9532e198ee8b42daa184c62b10080959 [ 886.040359] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa60babe9f0f438eb302d45c3b0b7581 [ 886.052399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61ae6c70fba847958ba4b68b96ae2bbb [ 886.207242] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.506350] env[61273]: DEBUG oslo_concurrency.lockutils [req-010c2b24-6b58-4314-8014-1549b8b35df4 req-07c7dd6b-372a-4a1c-b3ba-6ceef723f215 service nova] Releasing lock "refresh_cache-9adae455-b609-4ecb-8841-43fb4d826f84" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.506350] env[61273]: DEBUG nova.compute.utils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 886.506754] 
env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg a878c882c3b242b1b3dfdd8d534eb0cd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 886.507313] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquired lock "refresh_cache-9adae455-b609-4ecb-8841-43fb4d826f84" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.507489] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 886.507843] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 4e6a902fc48a465e849002d4200aea16 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 886.510148] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 170fcdbd79794d45858c481d1e693388 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 886.510930] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 886.511121] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 886.514764] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e6a902fc48a465e849002d4200aea16 [ 886.524437] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 170fcdbd79794d45858c481d1e693388 [ 886.524900] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a878c882c3b242b1b3dfdd8d534eb0cd [ 886.636131] env[61273]: DEBUG nova.policy [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8969ac54b88a47028e5784f6575f2d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d2fb7de0ad453dbe6891e6974f1b66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 887.014224] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 887.015974] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 82c6c34be0f54ac3af474cf21d0d984b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 887.049437] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 887.067047] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82c6c34be0f54ac3af474cf21d0d984b [ 887.151920] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.152447] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 933d420861e34966b85e7f7f85cdc202 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 887.160258] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 933d420861e34966b85e7f7f85cdc202 [ 887.162542] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Successfully created port: bfc6fb0b-0208-45a8-ba4f-77ee0a986c82 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.217983] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127a5b5d-a014-4bc4-99a5-7fbf529075b0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.225247] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12127a1-92bf-424d-b3e9-136469ffc3fe {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.254831] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce959113-454c-4230-87e7-214f66ddb391 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.261814] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5df4851-f263-41cf-80b6-4adf5fb2a4f6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.275989] env[61273]: DEBUG nova.compute.provider_tree [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.276525] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 368efa76f1e54652b80d190f1946f6e2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 887.284494] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 368efa76f1e54652b80d190f1946f6e2 [ 887.524971] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 
tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 85f3154b409d421b9501563cfe49740e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 887.555045] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85f3154b409d421b9501563cfe49740e [ 887.654786] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Releasing lock "refresh_cache-9adae455-b609-4ecb-8841-43fb4d826f84" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.655139] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 887.655254] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 887.655564] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-094f67c5-4c07-4a69-af08-e9c6e5d89fa2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.664445] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505a5e04-4aa2-4f86-b4cc-8be95731de96 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.686219] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9adae455-b609-4ecb-8841-43fb4d826f84 could not be found. [ 887.686269] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 887.686438] env[61273]: INFO nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Took 0.03 seconds to destroy the instance on the hypervisor. [ 887.686689] env[61273]: DEBUG oslo.service.loopingcall [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 887.686900] env[61273]: DEBUG nova.compute.manager [-] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 887.686988] env[61273]: DEBUG nova.network.neutron [-] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 887.778252] env[61273]: DEBUG nova.network.neutron [-] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 887.780461] env[61273]: DEBUG nova.scheduler.client.report [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 887.782805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 35b985f515df4253900f6399c402a9a3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 887.793708] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35b985f515df4253900f6399c402a9a3 [ 887.797979] env[61273]: DEBUG nova.compute.manager [req-f46f8949-f70f-48d4-91fa-38d39dca499b req-e36824b0-e3be-4431-bf4a-090c15235400 service nova] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Received event network-vif-deleted-4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 888.027708] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 888.053282] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 888.053522] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 888.053686] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.053870] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 888.054015] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.054159] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 888.054364] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 888.054520] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 888.054680] 
env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 888.054840] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 888.055011] env[61273]: DEBUG nova.virt.hardware [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 888.056197] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574752fc-7753-405a-a093-2a3005d7e96f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.059230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9ac8bb882c624ff4831b19cb5bcb6bb0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 888.068417] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ac8bb882c624ff4831b19cb5bcb6bb0 [ 888.069750] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a682ad-89b3-48c4-a387-8664ebcaff4d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.180785] env[61273]: DEBUG nova.compute.manager [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Received event network-changed-bfc6fb0b-0208-45a8-ba4f-77ee0a986c82 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 888.180978] env[61273]: DEBUG nova.compute.manager [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Refreshing instance network info cache due to event network-changed-bfc6fb0b-0208-45a8-ba4f-77ee0a986c82. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 888.181192] env[61273]: DEBUG oslo_concurrency.lockutils [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] Acquiring lock "refresh_cache-9cedc314-173e-4686-8ee5-28c2512dbcba" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.181331] env[61273]: DEBUG oslo_concurrency.lockutils [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] Acquired lock "refresh_cache-9cedc314-173e-4686-8ee5-28c2512dbcba" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.181586] env[61273]: DEBUG nova.network.neutron [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Refreshing network info cache for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 888.182026] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] Expecting reply to msg 70514028a6974cfc809f47a469ca3ddf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 888.197243] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70514028a6974cfc809f47a469ca3ddf [ 888.285222] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.285776] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 888.287566] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 7c04a4205e95441688ce2bdafa7a58cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 888.288738] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.417s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.299776] env[61273]: INFO nova.compute.claims [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.299776] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg aa06d40f5733423f9d4a7d25c8f6a31d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 888.325826] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c04a4205e95441688ce2bdafa7a58cc [ 888.327791] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa06d40f5733423f9d4a7d25c8f6a31d [ 888.550096] env[61273]: ERROR nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82, please check neutron logs for more information. 
[ 888.550096] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 888.550096] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 888.550096] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 888.550096] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 888.550096] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 888.550096] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 888.550096] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 888.550096] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 888.550096] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 888.550096] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 888.550096] env[61273]: ERROR nova.compute.manager raise self.value [ 888.550096] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 888.550096] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 888.550096] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 888.550096] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 888.550724] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 888.550724] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 888.550724] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82, please check neutron logs for more information. 
[ 888.550724] env[61273]: ERROR nova.compute.manager [ 888.550724] env[61273]: Traceback (most recent call last): [ 888.550724] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 888.550724] env[61273]: listener.cb(fileno) [ 888.550724] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 888.550724] env[61273]: result = function(*args, **kwargs) [ 888.550724] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 888.550724] env[61273]: return func(*args, **kwargs) [ 888.550724] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 888.550724] env[61273]: raise e [ 888.550724] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 888.550724] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 888.550724] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 888.550724] env[61273]: created_port_ids = self._update_ports_for_instance( [ 888.550724] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 888.550724] env[61273]: with excutils.save_and_reraise_exception(): [ 888.550724] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 888.550724] env[61273]: self.force_reraise() [ 888.550724] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 888.550724] env[61273]: raise self.value [ 888.550724] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 888.550724] env[61273]: updated_port = self._update_port( [ 888.550724] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 888.550724] env[61273]: _ensure_no_port_binding_failure(port) [ 888.550724] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 888.550724] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 888.551421] env[61273]: nova.exception.PortBindingFailed: Binding failed for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82, please check neutron logs for more information. [ 888.551421] env[61273]: Removing descriptor: 19 [ 888.551421] env[61273]: ERROR nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82, please check neutron logs for more information. 
[ 888.551421] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Traceback (most recent call last): [ 888.551421] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 888.551421] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] yield resources [ 888.551421] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 888.551421] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self.driver.spawn(context, instance, image_meta, [ 888.551421] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 888.551421] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 888.551421] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 888.551421] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] vm_ref = self.build_virtual_machine(instance, [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] vif_infos = vmwarevif.get_vif_info(self._session, [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] for vif in network_info: [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] return self._sync_wrapper(fn, *args, **kwargs) [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self.wait() [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self[:] = self._gt.wait() [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] return self._exit_event.wait() [ 888.551765] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 888.552134] env[61273]: ERROR 
nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] result = hub.switch() [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] return self.greenlet.switch() [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] result = function(*args, **kwargs) [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] return func(*args, **kwargs) [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] raise e [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] nwinfo = self.network_api.allocate_for_instance( [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 888.552134] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] created_port_ids = self._update_ports_for_instance( [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] with excutils.save_and_reraise_exception(): [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self.force_reraise() [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] raise self.value [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] updated_port = self._update_port( [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 888.552460] 
env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] _ensure_no_port_binding_failure(port) [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 888.552460] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] raise exception.PortBindingFailed(port_id=port['id']) [ 888.552763] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] nova.exception.PortBindingFailed: Binding failed for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82, please check neutron logs for more information. [ 888.552763] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] [ 888.552763] env[61273]: INFO nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Terminating instance [ 888.555487] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-9cedc314-173e-4686-8ee5-28c2512dbcba" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.563334] env[61273]: DEBUG nova.network.neutron [-] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.563716] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 52af48a8e3fe4ee1835c38dedfbbf62f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 888.572063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52af48a8e3fe4ee1835c38dedfbbf62f [ 888.705741] env[61273]: DEBUG nova.network.neutron [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 888.796713] env[61273]: DEBUG nova.compute.utils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 888.796713] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 15eeb17f470b45fa9e572a636e7e88b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 888.799038] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg fe98ce4484c34c788c49ba1f456a95bd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 888.801564] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 888.802010] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 888.806690] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe98ce4484c34c788c49ba1f456a95bd [ 888.808018] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15eeb17f470b45fa9e572a636e7e88b0 [ 888.836577] env[61273]: DEBUG nova.network.neutron [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.837145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] Expecting reply to msg bc4aa99a6cea4dc2b661205e36723caf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 888.844648] env[61273]: DEBUG nova.policy [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9aff8d5e22844391837fad86f2cd243a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1463a6a43bbe48f886efed86e8e24482', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 888.847396] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc4aa99a6cea4dc2b661205e36723caf 
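Note on the PortBindingFailed tracebacks above: both failures bottom out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which converts a port whose binding Neutron could not complete into nova.exception.PortBindingFailed. The following is only a minimal, hypothetical sketch of that kind of guard, not Nova's actual code; in particular, the checked key/value ('binding:vif_type' == 'binding_failed') is an assumption, and PortBindingFailed below is a stand-in class.

class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # Raise if Neutron reported that it could not bind the port
    # (assumed condition; shown only for illustration).
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example with a port that Neutron failed to bind, as in the log above:
port = {'id': 'bfc6fb0b-0208-45a8-ba4f-77ee0a986c82',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)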
[ 889.067863] env[61273]: INFO nova.compute.manager [-] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Took 1.38 seconds to deallocate network for instance. [ 889.068776] env[61273]: DEBUG nova.compute.claims [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 889.068776] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.147381] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Successfully created port: 228ecc96-89ef-4a4e-8571-06b5011021b9 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 889.300970] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 889.303006] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 9b0100ec3eba4ffca8a863379653bf25 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 889.339332] env[61273]: DEBUG oslo_concurrency.lockutils [req-500af819-a911-4726-b9a0-5d6b3ac3d948 req-2dffebf3-1b7b-4436-9060-0243d8155d09 service nova] Releasing lock "refresh_cache-9cedc314-173e-4686-8ee5-28c2512dbcba" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.340196] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b0100ec3eba4ffca8a863379653bf25 [ 889.341293] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-9cedc314-173e-4686-8ee5-28c2512dbcba" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.341293] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 889.341450] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg efbd42c9a7ff41bd97692c1ad9bd2630 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 
889.353584] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efbd42c9a7ff41bd97692c1ad9bd2630 [ 889.467614] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5184ce-081d-4553-a56a-aff54318f6f1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.476033] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c2348c-189b-4053-a012-18a11b3fc33b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.517899] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35899822-7ed5-4f59-9982-6b7cbe99d9d0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.525252] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8f355c-1ee2-4e0d-bde1-2f7bb818b068 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.538544] env[61273]: DEBUG nova.compute.provider_tree [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.539163] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 446ce76f775147e0ab12038c913d4922 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 889.547988] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 446ce76f775147e0ab12038c913d4922 [ 889.607108] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "c9214700-faf8-4a26-8084-ffe4a2c06480" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.607411] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "c9214700-faf8-4a26-8084-ffe4a2c06480" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.808330] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 9aa859fed39047d2950a51822569aa71 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 889.840836] env[61273]: DEBUG nova.compute.manager [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Received event 
network-changed-228ecc96-89ef-4a4e-8571-06b5011021b9 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 889.840836] env[61273]: DEBUG nova.compute.manager [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Refreshing instance network info cache due to event network-changed-228ecc96-89ef-4a4e-8571-06b5011021b9. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 889.840836] env[61273]: DEBUG oslo_concurrency.lockutils [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] Acquiring lock "refresh_cache-31ab5ebd-3df1-4e9f-bf53-69d47176da01" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.840836] env[61273]: DEBUG oslo_concurrency.lockutils [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] Acquired lock "refresh_cache-31ab5ebd-3df1-4e9f-bf53-69d47176da01" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.840836] env[61273]: DEBUG nova.network.neutron [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Refreshing network info cache for port 228ecc96-89ef-4a4e-8571-06b5011021b9 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 889.841019] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] Expecting reply to msg 53ee1a4153d84ca3ab73e633a71cb5ee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 889.844029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53ee1a4153d84ca3ab73e633a71cb5ee [ 889.861988] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9aa859fed39047d2950a51822569aa71 [ 889.867423] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 889.944748] env[61273]: ERROR nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 228ecc96-89ef-4a4e-8571-06b5011021b9, please check neutron logs for more information. 
[ 889.944748] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 889.944748] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 889.944748] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 889.944748] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 889.944748] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 889.944748] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 889.944748] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 889.944748] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 889.944748] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 889.944748] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 889.944748] env[61273]: ERROR nova.compute.manager raise self.value [ 889.944748] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 889.944748] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 889.944748] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 889.944748] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 889.945246] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 889.945246] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 889.945246] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 228ecc96-89ef-4a4e-8571-06b5011021b9, please check neutron logs for more information. 
[ 889.945246] env[61273]: ERROR nova.compute.manager [ 889.945246] env[61273]: Traceback (most recent call last): [ 889.945246] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 889.945246] env[61273]: listener.cb(fileno) [ 889.945246] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 889.945246] env[61273]: result = function(*args, **kwargs) [ 889.945246] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 889.945246] env[61273]: return func(*args, **kwargs) [ 889.945246] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 889.945246] env[61273]: raise e [ 889.945246] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 889.945246] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 889.945246] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 889.945246] env[61273]: created_port_ids = self._update_ports_for_instance( [ 889.945246] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 889.945246] env[61273]: with excutils.save_and_reraise_exception(): [ 889.945246] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 889.945246] env[61273]: self.force_reraise() [ 889.945246] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 889.945246] env[61273]: raise self.value [ 889.945246] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 889.945246] env[61273]: updated_port = self._update_port( [ 889.945246] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 889.945246] env[61273]: _ensure_no_port_binding_failure(port) [ 889.945246] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 889.945246] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 889.946085] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 228ecc96-89ef-4a4e-8571-06b5011021b9, please check neutron logs for more information. 
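The force_reraise() / raise self.value frames in these tracebacks come from oslo_utils.excutils.save_and_reraise_exception: the port update runs inside that context manager so cleanup can happen on failure and the original exception is re-raised afterwards. Below is a simplified, assumed sketch of the pattern (not oslo.utils' real implementation, which also supports logging and a reraise flag), followed by a usage shaped like the frames above.

import sys


class save_and_reraise_exception:
    # Simplified stand-in: remember the exception currently being handled,
    # let a cleanup block run, then re-raise the original. A new exception
    # raised by the cleanup block would propagate instead.

    def __enter__(self):
        self.type_, self.value, self.tb = sys.exc_info()
        return self

    def force_reraise(self):
        raise self.value.with_traceback(self.tb)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None:
            self.force_reraise()
        return False


def update_port():
    # Stands in for the failing Neutron port update in the traceback.
    raise RuntimeError("Binding failed for port ...")


try:
    try:
        update_port()
    except Exception:
        with save_and_reraise_exception():
            print("cleanup runs here before the original error is re-raised")
except RuntimeError as exc:
    print("re-raised:", exc)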
[ 889.946085] env[61273]: Removing descriptor: 15 [ 889.953432] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.954035] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg c5cba00bfab9455693427e8eac843220 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 889.961892] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5cba00bfab9455693427e8eac843220 [ 890.042308] env[61273]: DEBUG nova.scheduler.client.report [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 890.044684] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg e1fecbd009554080ad9b23bdc4a1546d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 890.057346] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1fecbd009554080ad9b23bdc4a1546d [ 890.206429] env[61273]: DEBUG nova.compute.manager [req-03289d08-ecfc-48ca-9677-5feea2ffd7b6 req-e9a0c9d3-9aa1-443a-b0cf-4b6390cdf2bf service nova] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Received event network-vif-deleted-bfc6fb0b-0208-45a8-ba4f-77ee0a986c82 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 890.312383] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 890.336507] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 890.336748] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 890.336893] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.337075] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 890.337216] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.337361] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 890.337561] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 890.337717] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 890.337876] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 890.338032] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 890.338197] env[61273]: DEBUG nova.virt.hardware [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 890.341087] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6860d43f-12c9-424d-a9bd-30e8ca1ea502 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.349221] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b25be68-f791-46a9-a6b0-5addc837593a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.362225] env[61273]: ERROR nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 228ecc96-89ef-4a4e-8571-06b5011021b9, please check neutron logs for more information. 
[ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Traceback (most recent call last): [ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] yield resources [ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self.driver.spawn(context, instance, image_meta, [ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self._vmops.spawn(context, instance, image_meta, injected_files, [ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] vm_ref = self.build_virtual_machine(instance, [ 890.362225] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] vif_infos = vmwarevif.get_vif_info(self._session, [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] for vif in network_info: [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] return self._sync_wrapper(fn, *args, **kwargs) [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self.wait() [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self[:] = self._gt.wait() [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] return self._exit_event.wait() [ 890.362870] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 890.362870] env[61273]: ERROR 
nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] current.throw(*self._exc) [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] result = function(*args, **kwargs) [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] return func(*args, **kwargs) [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] raise e [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] nwinfo = self.network_api.allocate_for_instance( [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] created_port_ids = self._update_ports_for_instance( [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] with excutils.save_and_reraise_exception(): [ 890.363537] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self.force_reraise() [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] raise self.value [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] updated_port = self._update_port( [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] _ensure_no_port_binding_failure(port) [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] raise exception.PortBindingFailed(port_id=port['id']) [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] nova.exception.PortBindingFailed: Binding failed for port 228ecc96-89ef-4a4e-8571-06b5011021b9, please check neutron logs for more information. [ 890.364581] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] [ 890.364581] env[61273]: INFO nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Terminating instance [ 890.365138] env[61273]: DEBUG nova.network.neutron [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 890.366093] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "refresh_cache-31ab5ebd-3df1-4e9f-bf53-69d47176da01" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.426612] env[61273]: DEBUG nova.network.neutron [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.427126] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] Expecting reply to msg 66c6bd836669492caa8c3a0b3597e91b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 890.434945] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66c6bd836669492caa8c3a0b3597e91b [ 890.455849] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-9cedc314-173e-4686-8ee5-28c2512dbcba" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.456271] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 890.456464] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 890.456745] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc18ebdc-3bdf-4c4f-a736-12763bb61888 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.465106] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a5440c-3ee8-4306-ba4e-73532f30d71d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.485217] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9cedc314-173e-4686-8ee5-28c2512dbcba could not be found. [ 890.485418] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 890.485593] env[61273]: INFO nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Took 0.03 seconds to destroy the instance on the hypervisor. [ 890.485826] env[61273]: DEBUG oslo.service.loopingcall [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.486040] env[61273]: DEBUG nova.compute.manager [-] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 890.486134] env[61273]: DEBUG nova.network.neutron [-] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 890.501375] env[61273]: DEBUG nova.network.neutron [-] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 890.501913] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 02eeb6bca966409692cdf272f1f67ffe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 890.510645] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02eeb6bca966409692cdf272f1f67ffe [ 890.549188] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.549188] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 890.549188] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg f6122f22684c442dbb4c71905b9242c1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 890.551815] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.900s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.554521] env[61273]: INFO nova.compute.claims [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.555609] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 53ae27609408413d918b91d4a29db9a1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 890.584914] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6122f22684c442dbb4c71905b9242c1 [ 890.595332] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53ae27609408413d918b91d4a29db9a1 [ 890.929571] env[61273]: DEBUG oslo_concurrency.lockutils [req-303f2f9b-824c-4cb0-a31d-beb19d84a59f req-acfa1ee6-2189-43f9-84ff-ffeb17048487 service nova] Releasing lock "refresh_cache-31ab5ebd-3df1-4e9f-bf53-69d47176da01" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.930003] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquired lock "refresh_cache-31ab5ebd-3df1-4e9f-bf53-69d47176da01" {{(pid=61273) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.930195] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 890.930642] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 0cbc0d25b61747bfb9e58fd4716c5f5a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 890.939941] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cbc0d25b61747bfb9e58fd4716c5f5a [ 891.004072] env[61273]: DEBUG nova.network.neutron [-] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.004488] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ce9fde491f464850bdab7e27a004acdb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 891.012625] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce9fde491f464850bdab7e27a004acdb [ 891.058486] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 3130824fe3794bc395fd4bb90988e9fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 891.060853] env[61273]: DEBUG nova.compute.utils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 891.061409] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 956592de185d4855a05ea32f374c26bd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 891.062713] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 891.062810] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 891.065608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3130824fe3794bc395fd4bb90988e9fb [ 891.071083] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 956592de185d4855a05ea32f374c26bd [ 891.100144] env[61273]: DEBUG nova.policy [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82cd1c91b7e04048bb5be1db3bdf6e2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ec0b17396a7411eaf00c5abdba0a16d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 891.367099] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Successfully created port: 7a55ee69-0884-433c-9e35-dd184ba21c20 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.447742] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 891.506901] env[61273]: INFO nova.compute.manager [-] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Took 1.02 seconds to deallocate network for instance. 
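The PortBindingFailed traceback above for port 228ecc96-89ef-4a4e-8571-06b5011021b9 bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294). A minimal sketch of that check, assuming Nova's usual behaviour of re-reading the port's binding:vif_type after asking Neutron to bind it; this is an illustration run against the Nova tree shown in this log, not the verbatim source:

    # Sketch of the helper named in the traceback: after Neutron is asked to
    # bind a port, Nova re-reads the port and refuses to continue if the
    # binding came back as 'binding_failed' (assumption: that is the field
    # and sentinel value being checked).
    from nova import exception  # available in the /opt/stack/nova venv used here

    def _ensure_no_port_binding_failure(port):
        binding_vif_type = port.get('binding:vif_type')
        if binding_vif_type == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])

When this fires, the build is abandoned and the instance is cleaned up, which is the Terminating instance / Deallocating network / Aborting claim sequence visible in the surrounding records.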
[ 891.509228] env[61273]: DEBUG nova.compute.claims [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 891.509409] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.541223] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.541223] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 49fb8992ff554c13953fb93a3b2a9cca in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 891.557567] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49fb8992ff554c13953fb93a3b2a9cca [ 891.564400] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 891.566075] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg bf0556dca37a4d959cecc5a468aa5c85 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 891.623582] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf0556dca37a4d959cecc5a468aa5c85 [ 891.757785] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37729630-9d4a-4780-984f-06cc505132b8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.766238] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d49b682-50d0-45f3-8c20-85d893c889e6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.796345] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae7bf03-0820-4b2e-986a-fb3588ab869d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.806231] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54a9b30-e44b-4482-8aae-c97b1d1fc5b0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.826359] env[61273]: DEBUG nova.compute.provider_tree [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.827164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 8eb562dc031c49f59e0086d2bb7eff17 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 891.840124] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8eb562dc031c49f59e0086d2bb7eff17 [ 891.866136] env[61273]: DEBUG nova.compute.manager [req-9f77b8f0-3f1e-4fd8-b3fe-411cf3b7c128 req-546daefb-ff6d-4741-8068-b88ca5007679 service nova] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Received event network-vif-deleted-228ecc96-89ef-4a4e-8571-06b5011021b9 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 892.040496] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Releasing lock "refresh_cache-31ab5ebd-3df1-4e9f-bf53-69d47176da01" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.040967] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 892.041201] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 892.041515] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84b0f44d-7178-4948-851c-aedff991e907 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.050215] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66fa60c-19f5-4350-be27-77c58c954f5e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.073419] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 1fca9d5d836043efaf62ed6ab6604532 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 892.085249] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 31ab5ebd-3df1-4e9f-bf53-69d47176da01 could not be found. [ 892.090341] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 892.090341] env[61273]: INFO nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Took 0.05 seconds to destroy the instance on the hypervisor. [ 892.090341] env[61273]: DEBUG oslo.service.loopingcall [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.090341] env[61273]: DEBUG nova.compute.manager [-] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 892.090341] env[61273]: DEBUG nova.network.neutron [-] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 892.110817] env[61273]: DEBUG nova.network.neutron [-] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 892.111366] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 13d5431057f542d3b255bb1838c53960 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 892.118395] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13d5431057f542d3b255bb1838c53960 [ 892.125732] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fca9d5d836043efaf62ed6ab6604532 [ 892.240919] env[61273]: DEBUG nova.compute.manager [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Received event network-changed-7a55ee69-0884-433c-9e35-dd184ba21c20 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 892.241123] env[61273]: DEBUG nova.compute.manager [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Refreshing instance network info cache due to event network-changed-7a55ee69-0884-433c-9e35-dd184ba21c20. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 892.241328] env[61273]: DEBUG oslo_concurrency.lockutils [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] Acquiring lock "refresh_cache-7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.241522] env[61273]: DEBUG oslo_concurrency.lockutils [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] Acquired lock "refresh_cache-7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.241717] env[61273]: DEBUG nova.network.neutron [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Refreshing network info cache for port 7a55ee69-0884-433c-9e35-dd184ba21c20 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 892.242141] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] Expecting reply to msg 6837838f3a7942c79d21271f30afc0be in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 892.250844] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6837838f3a7942c79d21271f30afc0be [ 892.329765] env[61273]: DEBUG nova.scheduler.client.report [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 892.332155] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg c2d0f5112f4a464fb0de647e01e23f36 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 892.333761] env[61273]: ERROR nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7a55ee69-0884-433c-9e35-dd184ba21c20, please check neutron logs for more information. [ 892.333761] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 892.333761] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 892.333761] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 892.333761] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 892.333761] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 892.333761] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 892.333761] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 892.333761] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 892.333761] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 892.333761] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 892.333761] env[61273]: ERROR nova.compute.manager raise self.value [ 892.333761] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 892.333761] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 892.333761] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 892.333761] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 892.334326] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 892.334326] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 892.334326] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7a55ee69-0884-433c-9e35-dd184ba21c20, please check neutron logs for more information. 
[ 892.334326] env[61273]: ERROR nova.compute.manager [ 892.334326] env[61273]: Traceback (most recent call last): [ 892.334326] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 892.334326] env[61273]: listener.cb(fileno) [ 892.334326] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 892.334326] env[61273]: result = function(*args, **kwargs) [ 892.334326] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 892.334326] env[61273]: return func(*args, **kwargs) [ 892.334326] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 892.334326] env[61273]: raise e [ 892.334326] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 892.334326] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 892.334326] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 892.334326] env[61273]: created_port_ids = self._update_ports_for_instance( [ 892.334326] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 892.334326] env[61273]: with excutils.save_and_reraise_exception(): [ 892.334326] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 892.334326] env[61273]: self.force_reraise() [ 892.334326] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 892.334326] env[61273]: raise self.value [ 892.334326] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 892.334326] env[61273]: updated_port = self._update_port( [ 892.334326] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 892.334326] env[61273]: _ensure_no_port_binding_failure(port) [ 892.334326] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 892.334326] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 892.335178] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 7a55ee69-0884-433c-9e35-dd184ba21c20, please check neutron logs for more information. [ 892.335178] env[61273]: Removing descriptor: 19 [ 892.343667] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2d0f5112f4a464fb0de647e01e23f36 [ 892.590103] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 892.614052] env[61273]: DEBUG nova.network.neutron [-] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.614502] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0302a418036c4a01ad1cefe2714ea7a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 892.624504] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 892.624504] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 892.624504] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.624969] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 892.624969] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.624969] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 892.624969] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 892.624969] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 892.625143] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 892.625143] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 892.625143] env[61273]: DEBUG nova.virt.hardware [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.625143] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26adbfcd-3716-42d8-abc4-b649d2000929 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.625143] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0302a418036c4a01ad1cefe2714ea7a4 [ 892.630639] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544e9f0b-e2d1-472e-a0dd-0bb91d935889 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.643631] env[61273]: ERROR nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7a55ee69-0884-433c-9e35-dd184ba21c20, please check neutron logs for more information. 
[ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Traceback (most recent call last): [ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] yield resources [ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self.driver.spawn(context, instance, image_meta, [ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] vm_ref = self.build_virtual_machine(instance, [ 892.643631] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] vif_infos = vmwarevif.get_vif_info(self._session, [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] for vif in network_info: [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] return self._sync_wrapper(fn, *args, **kwargs) [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self.wait() [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self[:] = self._gt.wait() [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] return self._exit_event.wait() [ 892.644032] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 892.644032] env[61273]: ERROR 
nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] current.throw(*self._exc) [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] result = function(*args, **kwargs) [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] return func(*args, **kwargs) [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] raise e [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] nwinfo = self.network_api.allocate_for_instance( [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] created_port_ids = self._update_ports_for_instance( [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] with excutils.save_and_reraise_exception(): [ 892.644428] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self.force_reraise() [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] raise self.value [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] updated_port = self._update_port( [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] _ensure_no_port_binding_failure(port) [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] raise exception.PortBindingFailed(port_id=port['id']) [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] nova.exception.PortBindingFailed: Binding failed for port 7a55ee69-0884-433c-9e35-dd184ba21c20, please check neutron logs for more information. [ 892.644834] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] [ 892.644834] env[61273]: INFO nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Terminating instance [ 892.645899] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Acquiring lock "refresh_cache-7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.764832] env[61273]: DEBUG nova.network.neutron [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 892.835771] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.284s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.836345] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 892.838019] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 1a843b2db6394c8c954784379356f59e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 892.839333] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.686s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.844124] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 6912817e0d224dfa8b1420bfdcb02a28 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 892.883755] env[61273]: DEBUG nova.network.neutron [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.883755] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] Expecting reply to msg e9ac4d117092440a8f1d77da60226901 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 892.883755] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a843b2db6394c8c954784379356f59e [ 892.888508] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9ac4d117092440a8f1d77da60226901 [ 892.891810] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6912817e0d224dfa8b1420bfdcb02a28 [ 893.125536] env[61273]: INFO nova.compute.manager [-] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Took 1.04 seconds to deallocate network for instance. 
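The failure for port 7a55ee69-0884-433c-9e35-dd184ba21c20 is reported twice above: first by the network-allocation greenthread in _allocate_network_async (ending with "Removing descriptor: 19"), and again as "Instance failed to spawn" when build_virtual_machine iterates network_info and the deferred result re-raises the stored exception through greenthread.wait(). A small self-contained illustration of that eventlet pattern; the class, function, and message are placeholders, not Nova code:

    import eventlet

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

    def allocate_network():
        # Stand-in for _allocate_network_async(): the real code asks Neutron
        # to bind the port and raises once the binding comes back failed.
        raise PortBindingFailed("Binding failed for port (placeholder)")

    gt = eventlet.spawn(allocate_network)   # the error is raised in the background
    try:
        gt.wait()                           # the spawn path later waits on the result
    except PortBindingFailed as exc:        # the stored exception re-raises here
        print("surfaced on wait():", exc)

This matches the traceback above, where wait() walks through eventlet/greenthread.py and eventlet/event.py before current.throw() re-raises the PortBindingFailed that was originally raised inside _allocate_network_async.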
[ 893.127922] env[61273]: DEBUG nova.compute.claims [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 893.128126] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.347933] env[61273]: DEBUG nova.compute.utils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 893.348583] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg a82dd8b0048b4bc784d9b3ce1b49051b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 893.353205] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 893.353205] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 893.359644] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a82dd8b0048b4bc784d9b3ce1b49051b [ 893.383445] env[61273]: DEBUG oslo_concurrency.lockutils [req-9cda06ab-22a8-4db1-88a8-1831acabe599 req-85d03339-806e-44f3-8ebc-f4355e522fce service nova] Releasing lock "refresh_cache-7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.383806] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Acquired lock "refresh_cache-7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.383993] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 893.384423] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 
tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 39398581c68e421d8cba81e2190b1d79 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 893.389003] env[61273]: DEBUG nova.policy [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afebea38badd427c9eb0e0e73108283f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5007ed6b2ee248ee9478fd6fe2d329d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 893.390923] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39398581c68e421d8cba81e2190b1d79 [ 893.505633] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53ea631-7e59-43b3-a9d7-e3a8a962a644 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.513615] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf2f80e-7ce7-423a-be28-3d213b270767 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.543179] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e84b62-a4f3-46d4-bdc4-12807e7378b2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.550172] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bea0882-5594-435d-904c-66d58434e3dc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.563440] env[61273]: DEBUG nova.compute.provider_tree [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.563949] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 93a80e61870e4ab98a74cbeaaa7917b7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 893.571598] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93a80e61870e4ab98a74cbeaaa7917b7 [ 893.637730] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Successfully created port: b746e4c7-581b-4e6f-8b47-cc4c45268a37 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.853002] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 
tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 893.854795] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg ac581a1737f24a6aa6ad201fe7d51eb3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 893.901061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac581a1737f24a6aa6ad201fe7d51eb3 [ 893.903797] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 893.980350] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.980861] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 1631ed0e58ad495cb0e11b660d77a32e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 893.988593] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1631ed0e58ad495cb0e11b660d77a32e [ 894.066205] env[61273]: DEBUG nova.scheduler.client.report [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 894.068512] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 6cc4de78b45d48f799bc216c44e14834 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 894.086032] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cc4de78b45d48f799bc216c44e14834 [ 894.301513] env[61273]: DEBUG nova.compute.manager [req-fc2afb20-1eb3-4d61-bb33-80f1692795be req-dc054787-0f93-41af-ab80-d78246d6001f service nova] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Received event network-vif-deleted-7a55ee69-0884-433c-9e35-dd184ba21c20 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 894.360752] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg dda3021611f14753a6c84480e4d8eeb3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 894.393278] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dda3021611f14753a6c84480e4d8eeb3 [ 894.483322] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Releasing lock "refresh_cache-7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.483564] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 894.483780] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 894.484451] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-651131fc-058c-4009-8d9e-f80837cf1339 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.492813] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdc3d52-5e05-4a2b-8a18-6db7e7e47d16 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.514645] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef could not be found. [ 894.514645] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 894.514645] env[61273]: INFO nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Took 0.03 seconds to destroy the instance on the hypervisor. [ 894.514766] env[61273]: DEBUG oslo.service.loopingcall [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.514894] env[61273]: DEBUG nova.compute.manager [-] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 894.514990] env[61273]: DEBUG nova.network.neutron [-] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 894.530438] env[61273]: DEBUG nova.network.neutron [-] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 894.530897] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a7ecc2c218e044f194b0ace380aed31a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 894.532295] env[61273]: ERROR nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b746e4c7-581b-4e6f-8b47-cc4c45268a37, please check neutron logs for more information. [ 894.532295] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 894.532295] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.532295] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 894.532295] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 894.532295] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 894.532295] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 894.532295] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 894.532295] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.532295] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 894.532295] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.532295] env[61273]: ERROR nova.compute.manager raise self.value [ 894.532295] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 894.532295] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 894.532295] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.532295] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 894.532836] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 894.532836] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 894.532836] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b746e4c7-581b-4e6f-8b47-cc4c45268a37, please check neutron logs for more information. 
[ 894.532836] env[61273]: ERROR nova.compute.manager [ 894.532836] env[61273]: Traceback (most recent call last): [ 894.532836] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 894.532836] env[61273]: listener.cb(fileno) [ 894.532836] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 894.532836] env[61273]: result = function(*args, **kwargs) [ 894.532836] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 894.532836] env[61273]: return func(*args, **kwargs) [ 894.532836] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 894.532836] env[61273]: raise e [ 894.532836] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.532836] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 894.532836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 894.532836] env[61273]: created_port_ids = self._update_ports_for_instance( [ 894.532836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 894.532836] env[61273]: with excutils.save_and_reraise_exception(): [ 894.532836] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.532836] env[61273]: self.force_reraise() [ 894.532836] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.532836] env[61273]: raise self.value [ 894.532836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 894.532836] env[61273]: updated_port = self._update_port( [ 894.532836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.532836] env[61273]: _ensure_no_port_binding_failure(port) [ 894.532836] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 894.532836] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 894.533685] env[61273]: nova.exception.PortBindingFailed: Binding failed for port b746e4c7-581b-4e6f-8b47-cc4c45268a37, please check neutron logs for more information. [ 894.533685] env[61273]: Removing descriptor: 19 [ 894.538015] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7ecc2c218e044f194b0ace380aed31a [ 894.571369] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.732s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.572068] env[61273]: ERROR nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1, please check neutron logs for more information. 
[ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Traceback (most recent call last): [ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self.driver.spawn(context, instance, image_meta, [ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] vm_ref = self.build_virtual_machine(instance, [ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] vif_infos = vmwarevif.get_vif_info(self._session, [ 894.572068] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] for vif in network_info: [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] return self._sync_wrapper(fn, *args, **kwargs) [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self.wait() [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self[:] = self._gt.wait() [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] return self._exit_event.wait() [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] current.throw(*self._exc) [ 894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
894.572417] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] result = function(*args, **kwargs) [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] return func(*args, **kwargs) [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] raise e [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] nwinfo = self.network_api.allocate_for_instance( [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] created_port_ids = self._update_ports_for_instance( [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] with excutils.save_and_reraise_exception(): [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] self.force_reraise() [ 894.572912] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.573312] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] raise self.value [ 894.573312] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 894.573312] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] updated_port = self._update_port( [ 894.573312] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.573312] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] _ensure_no_port_binding_failure(port) [ 894.573312] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 894.573312] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] raise exception.PortBindingFailed(port_id=port['id']) [ 894.573312] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] nova.exception.PortBindingFailed: Binding failed for 
port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1, please check neutron logs for more information. [ 894.573312] env[61273]: ERROR nova.compute.manager [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] [ 894.573312] env[61273]: DEBUG nova.compute.utils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Binding failed for port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 894.574542] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Build of instance 6182ea84-d5f2-4f01-9091-3d7b0b096d7c was re-scheduled: Binding failed for port 7fc6dd31-acd8-4eb7-952a-365a4a2614e1, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 894.575008] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 894.575272] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquiring lock "refresh_cache-6182ea84-d5f2-4f01-9091-3d7b0b096d7c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.575461] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Acquired lock "refresh_cache-6182ea84-d5f2-4f01-9091-3d7b0b096d7c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.575658] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 894.576187] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg daa04584e6ce48ba82dc4576c06921e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 894.577796] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.686s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.579704] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 
tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg bcf057734228401b82f0ccb786bf1abd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 894.583229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daa04584e6ce48ba82dc4576c06921e3 [ 894.673024] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcf057734228401b82f0ccb786bf1abd [ 894.872491] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 894.897142] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 894.897422] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 894.897595] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.897781] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 894.897925] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.898068] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 894.898271] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 894.898427] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 894.898592] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 894.898752] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 894.898952] env[61273]: DEBUG nova.virt.hardware [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 894.899862] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a675a91-f536-4ae4-b72c-622b9b8ee885 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.907822] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95395f17-0f9a-48fc-948d-0c4bf124a6e2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.921425] env[61273]: ERROR nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b746e4c7-581b-4e6f-8b47-cc4c45268a37, please check neutron logs for more information. 
[ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Traceback (most recent call last): [ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] yield resources [ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self.driver.spawn(context, instance, image_meta, [ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] vm_ref = self.build_virtual_machine(instance, [ 894.921425] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] vif_infos = vmwarevif.get_vif_info(self._session, [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] for vif in network_info: [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] return self._sync_wrapper(fn, *args, **kwargs) [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self.wait() [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self[:] = self._gt.wait() [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] return self._exit_event.wait() [ 894.921891] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 894.921891] env[61273]: ERROR 
nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] current.throw(*self._exc) [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] result = function(*args, **kwargs) [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] return func(*args, **kwargs) [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] raise e [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] nwinfo = self.network_api.allocate_for_instance( [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] created_port_ids = self._update_ports_for_instance( [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] with excutils.save_and_reraise_exception(): [ 894.922343] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self.force_reraise() [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] raise self.value [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] updated_port = self._update_port( [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] _ensure_no_port_binding_failure(port) [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] raise exception.PortBindingFailed(port_id=port['id']) [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] nova.exception.PortBindingFailed: Binding failed for port b746e4c7-581b-4e6f-8b47-cc4c45268a37, please check neutron logs for more information. [ 894.922825] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] [ 894.922825] env[61273]: INFO nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Terminating instance [ 894.923803] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Acquiring lock "refresh_cache-d3dafd33-91f8-481d-8f40-8c2e98a7587d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.923943] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Acquired lock "refresh_cache-d3dafd33-91f8-481d-8f40-8c2e98a7587d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.924125] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 894.924533] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 915fcb9024464ac59c0e931ffb3b307c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 894.930950] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 915fcb9024464ac59c0e931ffb3b307c [ 895.033315] env[61273]: DEBUG nova.network.neutron [-] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.033777] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1c9bf3fbeac24f7291e8ce8592bfbb64 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 895.041600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c9bf3fbeac24f7291e8ce8592bfbb64 [ 895.098322] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.201332] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.201949] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg f1577d2b23314c3f87293f868fa35b36 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 895.210298] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1577d2b23314c3f87293f868fa35b36 [ 895.227093] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3093eef7-71cc-42ff-847b-c422b39e4e5b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.234506] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d2ae88-374c-4fa0-9268-22c868d76938 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.266097] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e67588-4cbf-4ee1-aa74-b57e6cfd6ff9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.272688] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8d7eb7-b892-44f8-8126-442c9ace37f6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.285650] env[61273]: DEBUG nova.compute.provider_tree [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.286190] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg e02b6fae203c4bca9c4f807ce69edd4b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 895.292894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e02b6fae203c4bca9c4f807ce69edd4b [ 895.442165] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.512845] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.513377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg e2947516a7df4a24b82158c95566a93c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 895.521549] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2947516a7df4a24b82158c95566a93c [ 895.536149] env[61273]: INFO nova.compute.manager [-] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Took 1.02 seconds to deallocate network for instance. [ 895.538535] env[61273]: DEBUG nova.compute.claims [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 895.538711] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.703985] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Releasing lock "refresh_cache-6182ea84-d5f2-4f01-9091-3d7b0b096d7c" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.704297] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 895.704493] env[61273]: DEBUG nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 895.704656] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 895.720451] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.721016] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 17581fa8ecc54966b34efea985357e24 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 895.727526] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17581fa8ecc54966b34efea985357e24 [ 895.789040] env[61273]: DEBUG nova.scheduler.client.report [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 895.791445] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg a164c547e470453f816a904c54ee050a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 895.807730] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a164c547e470453f816a904c54ee050a [ 896.016182] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Releasing lock "refresh_cache-d3dafd33-91f8-481d-8f40-8c2e98a7587d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.016625] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 896.016820] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 896.017526] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6bc7abc5-1a85-48f9-bccb-69fcd14a0607 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.026081] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19656de8-7469-4d6e-8dcb-01176f0b4d12 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.047607] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d3dafd33-91f8-481d-8f40-8c2e98a7587d could not be found. [ 896.047810] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 896.047990] env[61273]: INFO nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 896.048276] env[61273]: DEBUG oslo.service.loopingcall [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.048492] env[61273]: DEBUG nova.compute.manager [-] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 896.048584] env[61273]: DEBUG nova.network.neutron [-] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 896.064219] env[61273]: DEBUG nova.network.neutron [-] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 896.064713] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e79c823731a04a5093fd53470c5458e6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.071731] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e79c823731a04a5093fd53470c5458e6 [ 896.223081] env[61273]: DEBUG nova.network.neutron [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.223611] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 9d1ef6dddb544b609dd9ed1efc95925b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.232243] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d1ef6dddb544b609dd9ed1efc95925b [ 896.294105] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.716s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.294689] env[61273]: ERROR nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 038957b1-b320-43d4-bfde-f11c360acfb3, please check neutron logs for more information. 
[ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Traceback (most recent call last): [ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self.driver.spawn(context, instance, image_meta, [ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] vm_ref = self.build_virtual_machine(instance, [ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] vif_infos = vmwarevif.get_vif_info(self._session, [ 896.294689] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] for vif in network_info: [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] return self._sync_wrapper(fn, *args, **kwargs) [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self.wait() [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self[:] = self._gt.wait() [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] return self._exit_event.wait() [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] current.throw(*self._exc) [ 896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
896.295115] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] result = function(*args, **kwargs) [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] return func(*args, **kwargs) [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] raise e [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] nwinfo = self.network_api.allocate_for_instance( [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] created_port_ids = self._update_ports_for_instance( [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] with excutils.save_and_reraise_exception(): [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] self.force_reraise() [ 896.295499] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 896.296039] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] raise self.value [ 896.296039] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 896.296039] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] updated_port = self._update_port( [ 896.296039] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 896.296039] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] _ensure_no_port_binding_failure(port) [ 896.296039] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 896.296039] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] raise exception.PortBindingFailed(port_id=port['id']) [ 896.296039] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] nova.exception.PortBindingFailed: Binding failed for 
port 038957b1-b320-43d4-bfde-f11c360acfb3, please check neutron logs for more information. [ 896.296039] env[61273]: ERROR nova.compute.manager [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] [ 896.296039] env[61273]: DEBUG nova.compute.utils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Binding failed for port 038957b1-b320-43d4-bfde-f11c360acfb3, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 896.296504] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.552s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.298008] env[61273]: INFO nova.compute.claims [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.299640] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 1fe4ce62890e4e359790f8f722d3fa04 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.300819] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Build of instance 27e43d79-6435-46fb-ac71-9be7313d591a was re-scheduled: Binding failed for port 038957b1-b320-43d4-bfde-f11c360acfb3, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 896.301233] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 896.301447] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-27e43d79-6435-46fb-ac71-9be7313d591a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.301589] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-27e43d79-6435-46fb-ac71-9be7313d591a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.301768] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 896.302132] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 49c1b5f3f25e46198f0dcd23179c2c94 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.307906] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49c1b5f3f25e46198f0dcd23179c2c94 [ 896.333089] env[61273]: DEBUG nova.compute.manager [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Received event network-changed-b746e4c7-581b-4e6f-8b47-cc4c45268a37 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 896.333282] env[61273]: DEBUG nova.compute.manager [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Refreshing instance network info cache due to event network-changed-b746e4c7-581b-4e6f-8b47-cc4c45268a37. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 896.333717] env[61273]: DEBUG oslo_concurrency.lockutils [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] Acquiring lock "refresh_cache-d3dafd33-91f8-481d-8f40-8c2e98a7587d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.333717] env[61273]: DEBUG oslo_concurrency.lockutils [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] Acquired lock "refresh_cache-d3dafd33-91f8-481d-8f40-8c2e98a7587d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.333862] env[61273]: DEBUG nova.network.neutron [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Refreshing network info cache for port b746e4c7-581b-4e6f-8b47-cc4c45268a37 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 896.334250] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] Expecting reply to msg 95b676c3e8d44bdd970de62e9cfb1fe8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.335163] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fe4ce62890e4e359790f8f722d3fa04 [ 896.341032] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95b676c3e8d44bdd970de62e9cfb1fe8 [ 896.567068] env[61273]: DEBUG nova.network.neutron [-] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.567559] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c8c48681005e46dc97af8b4442c6464c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.574866] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8c48681005e46dc97af8b4442c6464c [ 896.725889] env[61273]: INFO nova.compute.manager [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] [instance: 6182ea84-d5f2-4f01-9091-3d7b0b096d7c] Took 1.02 seconds to deallocate network for instance. 
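Editor's note: the network-changed event handling above follows a fixed shape: acquire a per-instance "refresh_cache-<uuid>" lock, ask Neutron for the instance's current ports, and write the result back into the instance network info cache (here an empty list, since the port never bound). A minimal sketch of that shape follows; the helper fetch_nw_info_from_neutron and the dict-based cache are illustrative stand-ins, only the oslo_concurrency locking call mirrors the lock lines in the log.

from oslo_concurrency import lockutils

_nw_info_cache = {}  # instance uuid -> network info (stand-in for Nova's DB-backed cache)

def fetch_nw_info_from_neutron(instance_uuid, port_id):
    # Stand-in for the Neutron query. Returning [] reproduces the
    # "Updating instance_info_cache with network_info: []" entries above,
    # which is what happens when the port is gone or never bound.
    return []

def refresh_nw_info_cache(instance_uuid, port_id):
    # Serialize refreshes per instance, mirroring the
    # Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>" lines.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        nw_info = fetch_nw_info_from_neutron(instance_uuid, port_id)
        _nw_info_cache[instance_uuid] = nw_info
        return nw_info

if __name__ == "__main__":
    print(refresh_nw_info_cache("d3dafd33-91f8-481d-8f40-8c2e98a7587d",
                                "b746e4c7-581b-4e6f-8b47-cc4c45268a37"))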
[ 896.727571] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 29592c6984524d9f926eac0fa133ded6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.758744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29592c6984524d9f926eac0fa133ded6 [ 896.808207] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg e134ca6c3b094e288fdc9fa6d196f494 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.815561] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e134ca6c3b094e288fdc9fa6d196f494 [ 896.822982] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 896.852118] env[61273]: DEBUG nova.network.neutron [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 896.892260] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.893003] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg c2cc2b61f725417baab4341678e34460 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.900800] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2cc2b61f725417baab4341678e34460 [ 896.916727] env[61273]: DEBUG nova.network.neutron [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.917234] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] Expecting reply to msg f7bb3e03fdd84eb690c3f88bf148c90b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 896.925809] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7bb3e03fdd84eb690c3f88bf148c90b [ 897.069296] env[61273]: INFO nova.compute.manager [-] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Took 1.02 seconds to deallocate network for instance. 
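Editor's note: the recurring compute_resources lock lines (for example "acquired ... waited 13.552s" and later "released ... held 2.259s") come from the resource tracker serializing resource claims on the host, and "Aborting claim" is the matching give-back path when a build fails afterwards. A toy sketch of that serialize-claim/abort pattern, assuming an invented HostState and claim/abort helpers rather than Nova's real ResourceTracker; only the lockutils.synchronized decorator matches what the log shows.

from oslo_concurrency import lockutils

class HostState:
    def __init__(self, vcpus, memory_mb):
        self.free_vcpus = vcpus
        self.free_memory_mb = memory_mb

host = HostState(vcpus=48, memory_mb=196590)

@lockutils.synchronized("compute_resources")
def claim_instance(vcpus, memory_mb):
    # Under the lock, check and reserve resources atomically; this window is
    # what gets reported as "held N.NNNs" when the lock is released.
    if vcpus > host.free_vcpus or memory_mb > host.free_memory_mb:
        raise RuntimeError("insufficient resources")
    host.free_vcpus -= vcpus
    host.free_memory_mb -= memory_mb

@lockutils.synchronized("compute_resources")
def abort_claim(vcpus, memory_mb):
    # Mirrors "Aborting claim": return the resources when the build fails
    # later, e.g. on a PortBindingFailed during network setup.
    host.free_vcpus += vcpus
    host.free_memory_mb += memory_mb

claim_instance(1, 192)   # roughly the m1.nano-sized claim seen in the log
abort_claim(1, 192)      # given back once the build is abandoned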
[ 897.072509] env[61273]: DEBUG nova.compute.claims [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 897.072739] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.232344] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 2c306b531e34422d9eb4de4148287b55 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 897.261005] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c306b531e34422d9eb4de4148287b55 [ 897.396074] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-27e43d79-6435-46fb-ac71-9be7313d591a" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.396326] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 897.396508] env[61273]: DEBUG nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 897.396672] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 897.418696] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.419338] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 0cd994b0598140f79db6390b0c336ae5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 897.420510] env[61273]: DEBUG oslo_concurrency.lockutils [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] Releasing lock "refresh_cache-d3dafd33-91f8-481d-8f40-8c2e98a7587d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.420731] env[61273]: DEBUG nova.compute.manager [req-41cfe075-2b4e-4b1b-9ac9-f7cb0d8ba2b8 req-d81c89ba-16f5-4ce2-8720-95ac04593889 service nova] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Received event network-vif-deleted-b746e4c7-581b-4e6f-8b47-cc4c45268a37 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 897.427636] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cd994b0598140f79db6390b0c336ae5 [ 897.487288] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1455f5d7-05ce-4242-9253-061c7cff467c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.495481] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e541aea-4c65-46c6-8d69-26b0c5049ff8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.526325] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcb5845-55ca-4a85-9aa7-7075175d24e4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.533554] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0245eb61-c4ba-4c4b-a969-14940aee5e1a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.546572] env[61273]: DEBUG nova.compute.provider_tree [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.547094] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 803a0bb5611243dd900f9d56a9861c21 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 897.555900] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 803a0bb5611243dd900f9d56a9861c21 [ 897.754554] env[61273]: INFO nova.scheduler.client.report [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Deleted allocations for instance 6182ea84-d5f2-4f01-9091-3d7b0b096d7c [ 897.760688] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-825cd93b-58d0-432f-be98-06744830a079 
tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Expecting reply to msg 8bfaf90f9d914d9c97fe4cf1b7c78ba9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 897.777855] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bfaf90f9d914d9c97fe4cf1b7c78ba9 [ 897.922457] env[61273]: DEBUG nova.network.neutron [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.923024] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg cd300f1e910641fe8474a7a6f16ab090 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 897.930806] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd300f1e910641fe8474a7a6f16ab090 [ 898.050207] env[61273]: DEBUG nova.scheduler.client.report [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 898.053720] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg b05fef2140624665bb480db08dc982de in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 898.065267] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b05fef2140624665bb480db08dc982de [ 898.263674] env[61273]: DEBUG oslo_concurrency.lockutils [None req-825cd93b-58d0-432f-be98-06744830a079 tempest-MultipleCreateTestJSON-1846072068 tempest-MultipleCreateTestJSON-1846072068-project-member] Lock "6182ea84-d5f2-4f01-9091-3d7b0b096d7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.778s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.263674] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 0789017031534565b11c7ef92f227f91 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 898.272876] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0789017031534565b11c7ef92f227f91 [ 898.425302] env[61273]: INFO nova.compute.manager [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 27e43d79-6435-46fb-ac71-9be7313d591a] Took 1.03 seconds to deallocate 
network for instance. [ 898.427729] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 13a603f819514bf6881d71c8895dfb2c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 898.465026] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13a603f819514bf6881d71c8895dfb2c [ 898.555800] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.259s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.556548] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 898.558468] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 77b8b83482c344119149e3a2e126bbcf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 898.559717] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.104s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.561829] env[61273]: INFO nova.compute.claims [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 898.563592] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 99db655e0bb84eb0bbe8aeb51a9f2514 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 898.605025] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77b8b83482c344119149e3a2e126bbcf [ 898.613010] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99db655e0bb84eb0bbe8aeb51a9f2514 [ 898.765223] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 898.767481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 7aa385ab5a51482c99218a44bb629c3f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 898.799892] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7aa385ab5a51482c99218a44bb629c3f [ 898.932776] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 8572cdacfa9b48df9eb61a4add12c26a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 898.974606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8572cdacfa9b48df9eb61a4add12c26a [ 899.068438] env[61273]: DEBUG nova.compute.utils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 899.069237] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 3deee897f48e41888c208a794349c9b8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 899.071845] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 70a03b5973ef40a9bda61fa1ffa1ec92 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 899.072998] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 899.073295] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 899.080151] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70a03b5973ef40a9bda61fa1ffa1ec92 [ 899.084343] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3deee897f48e41888c208a794349c9b8 [ 899.110633] env[61273]: DEBUG nova.policy [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77bd4b765f214cb8bb602e8e52071531', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a752424d76840dabab55a9202e7a635', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 899.289265] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.390996] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Successfully created port: 1c1b5e97-af36-4179-8875-68e15da56408 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 899.455892] env[61273]: INFO nova.scheduler.client.report [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Deleted allocations for instance 27e43d79-6435-46fb-ac71-9be7313d591a [ 899.461706] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg c2ece94ec0674ae486bcb438c410b547 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 899.486699] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2ece94ec0674ae486bcb438c410b547 [ 899.579592] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 899.579592] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg d7d453d8eaa1403f94b3ba31c8a331b5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 899.630330] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7d453d8eaa1403f94b3ba31c8a331b5 [ 899.735176] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46604258-1eb2-48de-b140-ce71e0331830 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.742899] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3fae30-c1bb-47ad-a082-604bdd3215c9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.773577] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf26a38-f74c-4cfa-9262-c6edabd6e24c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.780813] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9038e7c7-9d72-4b9c-a5b8-238ea77c013d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.794931] env[61273]: DEBUG nova.compute.provider_tree [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.795418] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 970c9bf047a044cda4f29de76e5a9a77 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 899.801968] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 970c9bf047a044cda4f29de76e5a9a77 [ 899.963650] env[61273]: DEBUG oslo_concurrency.lockutils [None req-f051d1e7-6a12-4ce0-b0a9-15b42a1b6c11 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "27e43d79-6435-46fb-ac71-9be7313d591a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.617s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.964281] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 4431ba1dbd1f4c9c9a789881b4dd9e7c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 899.974632] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4431ba1dbd1f4c9c9a789881b4dd9e7c [ 900.084236] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 
tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 61ec70e03f67497c9681238d1fc0db2a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 900.120578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61ec70e03f67497c9681238d1fc0db2a [ 900.298424] env[61273]: DEBUG nova.scheduler.client.report [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 900.302071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg aac80ecd3579407783ad37d8b1462f43 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 900.315728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aac80ecd3579407783ad37d8b1462f43 [ 900.467460] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 900.469472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg cf3722a7a3c342beb51c73004385f04b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 900.503377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf3722a7a3c342beb51c73004385f04b [ 900.547896] env[61273]: DEBUG nova.compute.manager [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Received event network-changed-1c1b5e97-af36-4179-8875-68e15da56408 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 900.548111] env[61273]: DEBUG nova.compute.manager [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Refreshing instance network info cache due to event network-changed-1c1b5e97-af36-4179-8875-68e15da56408. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 900.548329] env[61273]: DEBUG oslo_concurrency.lockutils [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] Acquiring lock "refresh_cache-f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.548468] env[61273]: DEBUG oslo_concurrency.lockutils [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] Acquired lock "refresh_cache-f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.548856] env[61273]: DEBUG nova.network.neutron [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Refreshing network info cache for port 1c1b5e97-af36-4179-8875-68e15da56408 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 900.549355] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] Expecting reply to msg f6948a1438184d5fbfbbe352b1efdadd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 900.559783] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6948a1438184d5fbfbbe352b1efdadd [ 900.587392] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 900.612894] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 900.613151] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 900.613309] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 900.613487] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 900.613631] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 900.613777] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 900.613980] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 900.614134] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 900.614294] env[61273]: DEBUG 
nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 900.614453] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 900.614699] env[61273]: DEBUG nova.virt.hardware [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 900.615875] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a80a98e-9f75-4ceb-9d87-5b6869f4ad9b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.624424] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003d6788-515b-440f-83da-62023a18c150 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.775898] env[61273]: ERROR nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1c1b5e97-af36-4179-8875-68e15da56408, please check neutron logs for more information. 
[ 900.775898] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 900.775898] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 900.775898] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 900.775898] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 900.775898] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 900.775898] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 900.775898] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 900.775898] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 900.775898] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 900.775898] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 900.775898] env[61273]: ERROR nova.compute.manager raise self.value [ 900.775898] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 900.775898] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 900.775898] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 900.775898] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 900.776601] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 900.776601] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 900.776601] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1c1b5e97-af36-4179-8875-68e15da56408, please check neutron logs for more information. 
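Editor's note: every PortBindingFailed traceback in this run ends in the same check in nova/network/neutron.py: after updating the port with the host binding, Nova inspects the binding:vif_type Neutron returned and raises if Neutron reported a failed binding. A condensed sketch of that check, using a plain dict for the port and a local exception class in place of nova.exception; the 'binding_failed' value is what Neutron reports when it could not bind the port on the target host.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Condensed form of the check the tracebacks above hit at neutron.py:294.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])

# The port Neutron handed back for the failed boot above would look roughly like:
port = {"id": "1c1b5e97-af36-4179-8875-68e15da56408",
        "binding:vif_type": "binding_failed"}
ensure_no_port_binding_failure(port)  # raises PortBindingFailed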
[ 900.776601] env[61273]: ERROR nova.compute.manager [ 900.776601] env[61273]: Traceback (most recent call last): [ 900.776601] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 900.776601] env[61273]: listener.cb(fileno) [ 900.776601] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 900.776601] env[61273]: result = function(*args, **kwargs) [ 900.776601] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 900.776601] env[61273]: return func(*args, **kwargs) [ 900.776601] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 900.776601] env[61273]: raise e [ 900.776601] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 900.776601] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 900.776601] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 900.776601] env[61273]: created_port_ids = self._update_ports_for_instance( [ 900.776601] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 900.776601] env[61273]: with excutils.save_and_reraise_exception(): [ 900.776601] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 900.776601] env[61273]: self.force_reraise() [ 900.776601] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 900.776601] env[61273]: raise self.value [ 900.776601] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 900.776601] env[61273]: updated_port = self._update_port( [ 900.776601] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 900.776601] env[61273]: _ensure_no_port_binding_failure(port) [ 900.776601] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 900.776601] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 900.777551] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 1c1b5e97-af36-4179-8875-68e15da56408, please check neutron logs for more information. [ 900.777551] env[61273]: Removing descriptor: 15 [ 900.777551] env[61273]: ERROR nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1c1b5e97-af36-4179-8875-68e15da56408, please check neutron logs for more information. 
[ 900.777551] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Traceback (most recent call last): [ 900.777551] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 900.777551] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] yield resources [ 900.777551] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 900.777551] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self.driver.spawn(context, instance, image_meta, [ 900.777551] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 900.777551] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 900.777551] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 900.777551] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] vm_ref = self.build_virtual_machine(instance, [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] vif_infos = vmwarevif.get_vif_info(self._session, [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] for vif in network_info: [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] return self._sync_wrapper(fn, *args, **kwargs) [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self.wait() [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self[:] = self._gt.wait() [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] return self._exit_event.wait() [ 900.777907] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 900.778276] env[61273]: ERROR 
nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] result = hub.switch() [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] return self.greenlet.switch() [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] result = function(*args, **kwargs) [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] return func(*args, **kwargs) [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] raise e [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] nwinfo = self.network_api.allocate_for_instance( [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 900.778276] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] created_port_ids = self._update_ports_for_instance( [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] with excutils.save_and_reraise_exception(): [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self.force_reraise() [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] raise self.value [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] updated_port = self._update_port( [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 900.778650] 
env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] _ensure_no_port_binding_failure(port) [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 900.778650] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] raise exception.PortBindingFailed(port_id=port['id']) [ 900.778989] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] nova.exception.PortBindingFailed: Binding failed for port 1c1b5e97-af36-4179-8875-68e15da56408, please check neutron logs for more information. [ 900.778989] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] [ 900.778989] env[61273]: INFO nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Terminating instance [ 900.779893] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "refresh_cache-f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.805149] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.805800] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 900.807805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg b8c58d83f9bd491ca3e17a1dcf2408d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 900.810196] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.602s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.811624] env[61273]: INFO nova.compute.claims [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 900.813337] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 1798f1e7611c4c108ae1b5eb33632c91 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 900.850070] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8c58d83f9bd491ca3e17a1dcf2408d3 [ 900.853907] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1798f1e7611c4c108ae1b5eb33632c91 [ 900.999472] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.071120] env[61273]: DEBUG nova.network.neutron [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Instance cache missing network info. 
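
The lockutils DEBUG lines above ('acquired ... waited 14.602s', 'released ... held 2.245s') come from timing how long a caller waited for a named lock and how long it held it. A self-contained sketch of that pattern using a plain threading lock; this is illustrative only, oslo.concurrency's real implementation also supports external file locks and fairness:

import contextlib
import logging
import threading
import time

LOG = logging.getLogger(__name__)

_locks = {}                       # name -> threading.Lock, created on first use
_locks_guard = threading.Lock()


@contextlib.contextmanager
def timed_lock(name, caller):
    """Acquire the named lock, logging wait and hold times like lockutils."""
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())

    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    start = time.monotonic()
    with lock:
        waited = time.monotonic() - start
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, caller, waited)
        held_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_start
            LOG.debug('Lock "%s" released by "%s" :: held %.3fs',
                      name, caller, held)

# Usage shape matching the log:
#   with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
#       ...claim resources...
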
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 901.141929] env[61273]: DEBUG nova.network.neutron [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.142461] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] Expecting reply to msg 73980491e2e54f6ebd8a954508360800 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 901.155112] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73980491e2e54f6ebd8a954508360800 [ 901.316388] env[61273]: DEBUG nova.compute.utils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 901.317208] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg b056d83dfd854b8f80841025d3239af9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 901.319423] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg d9e064a7d30f401bb2b74f8bf39e700a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 901.320540] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Allocating IP information in the background. 
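
'Allocating IP information in the background' marks the point where network allocation is handed to a green thread while the build continues; the result is only awaited when the driver first iterates the network_info (the self.wait()/_sync_wrapper frames in the spawn traceback further down). A rough sketch of that deferred pattern with eventlet, using a hypothetical allocate callable; this is not the real NetworkInfoAsyncWrapper:

import eventlet


class DeferredNetworkInfo:
    """Placeholder that resolves to the allocation result on first use."""

    def __init__(self, allocate, *args, **kwargs):
        # Kick off the allocation immediately in a green thread.
        self._gt = eventlet.spawn(allocate, *args, **kwargs)
        self._result = None
        self._done = False

    def wait(self):
        if not self._done:
            # GreenThread.wait() re-raises any exception raised by the
            # allocation (e.g. PortBindingFailed), which is why a network
            # failure can surface later, inside driver.spawn().
            self._result = self._gt.wait()
            self._done = True
        return self._result

    def __iter__(self):
        return iter(self.wait())

# network_info = DeferredNetworkInfo(allocate_for_instance, ctxt, instance)
# ... keep building block devices, etc. ...
# for vif in network_info:   # allocation errors are raised here
#     ...
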
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 901.322686] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 901.327802] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b056d83dfd854b8f80841025d3239af9 [ 901.332818] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9e064a7d30f401bb2b74f8bf39e700a [ 901.362585] env[61273]: DEBUG nova.policy [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afd2b293ac5747749b0bae2b787277ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b8d7d7387e44003b6b4bc488c7900f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 901.644548] env[61273]: DEBUG oslo_concurrency.lockutils [req-b20d9880-c611-4c7b-94ec-d6be84346bb6 req-3e84ee48-c2e0-4ed4-a36f-6e8f87e666df service nova] Releasing lock "refresh_cache-f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.644975] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquired lock "refresh_cache-f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.645171] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 901.645617] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 96489eba5b7d48d6b08c3063578c6c51 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 901.652671] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96489eba5b7d48d6b08c3063578c6c51 [ 901.656053] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Successfully created port: 39750689-9293-4d1a-b24b-04470262e9e6 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 901.821273] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 
tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 901.823502] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg b873d871c7cb4013a259bdce878ddad4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 901.867063] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b873d871c7cb4013a259bdce878ddad4 [ 901.989142] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf632bc0-7fa3-4f34-8501-5d836c86a5da {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.997719] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa743034-4f7d-4f27-91de-0a43e73f55c1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.031106] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99da928-40cd-4780-ae6b-2e374c542817 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.041979] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706e02ef-c24f-4794-8dae-8800378ec6e3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.057172] env[61273]: DEBUG nova.compute.provider_tree [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.057863] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 02df130939474817a69de2f098e43c6d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 902.066312] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02df130939474817a69de2f098e43c6d [ 902.066908] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "77ca81dc-6322-41de-aaee-adf36d6ce79f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.067130] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "77ca81dc-6322-41de-aaee-adf36d6ce79f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.162604] env[61273]: DEBUG nova.network.neutron [None 
req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 902.267924] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.268493] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 532da348c4aa419ea867d581a25e5c35 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 902.276571] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 532da348c4aa419ea867d581a25e5c35 [ 902.328616] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 2fd245ad6b4f411b9fbad1c9959f4d35 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 902.357819] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fd245ad6b4f411b9fbad1c9959f4d35 [ 902.435247] env[61273]: ERROR nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 39750689-9293-4d1a-b24b-04470262e9e6, please check neutron logs for more information. 
[ 902.435247] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 902.435247] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 902.435247] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 902.435247] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 902.435247] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 902.435247] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 902.435247] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 902.435247] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 902.435247] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 902.435247] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 902.435247] env[61273]: ERROR nova.compute.manager raise self.value [ 902.435247] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 902.435247] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 902.435247] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 902.435247] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 902.435735] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 902.435735] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 902.435735] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 39750689-9293-4d1a-b24b-04470262e9e6, please check neutron logs for more information. 
[ 902.435735] env[61273]: ERROR nova.compute.manager [ 902.436035] env[61273]: Traceback (most recent call last): [ 902.436070] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 902.436070] env[61273]: listener.cb(fileno) [ 902.436070] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 902.436070] env[61273]: result = function(*args, **kwargs) [ 902.436070] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 902.436070] env[61273]: return func(*args, **kwargs) [ 902.436070] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 902.436070] env[61273]: raise e [ 902.436070] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 902.436070] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 902.436070] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 902.436070] env[61273]: created_port_ids = self._update_ports_for_instance( [ 902.436070] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 902.436070] env[61273]: with excutils.save_and_reraise_exception(): [ 902.436070] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 902.436070] env[61273]: self.force_reraise() [ 902.436070] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 902.436070] env[61273]: raise self.value [ 902.436070] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 902.436070] env[61273]: updated_port = self._update_port( [ 902.436070] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 902.436070] env[61273]: _ensure_no_port_binding_failure(port) [ 902.436070] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 902.436070] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 902.436657] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 39750689-9293-4d1a-b24b-04470262e9e6, please check neutron logs for more information. 
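
Each traceback passes through oslo_utils.excutils.save_and_reraise_exception, the context manager whose __exit__/force_reraise frames appear above. It exists so cleanup can run after a failure without losing the original exception; a small usage sketch, where update_one_port and delete_ports are hypothetical stand-ins:

from oslo_utils import excutils


def update_ports_for_instance(ports, update_one_port, delete_ports):
    """Update ports one by one; on failure, clean up and re-raise."""
    created = []
    for port in ports:
        try:
            created.append(update_one_port(port))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs while the original exception is saved; when this
                # block exits, force_reraise() raises it again unchanged,
                # which is the "raise self.value" frame seen in the log.
                delete_ports(created)
    return created
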
[ 902.436657] env[61273]: Removing descriptor: 15 [ 902.561410] env[61273]: DEBUG nova.scheduler.client.report [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 902.563437] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 6589214869194f2daf0097551bd0b3b9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 902.579623] env[61273]: DEBUG nova.compute.manager [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Received event network-vif-deleted-1c1b5e97-af36-4179-8875-68e15da56408 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 902.579623] env[61273]: DEBUG nova.compute.manager [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Received event network-changed-39750689-9293-4d1a-b24b-04470262e9e6 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 902.579623] env[61273]: DEBUG nova.compute.manager [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Refreshing instance network info cache due to event network-changed-39750689-9293-4d1a-b24b-04470262e9e6. 
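
The inventory dict reported above ('Inventory has not changed for provider ...') is what placement uses to size the node: usable capacity per resource class is (total - reserved) * allocation_ratio. A quick check against the logged numbers, as a worked example rather than Nova code:

# Inventory as logged for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} allocatable")

# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
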
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 902.579623] env[61273]: DEBUG oslo_concurrency.lockutils [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] Acquiring lock "refresh_cache-d4a2025d-c128-45a2-b74c-a7fd2630d615" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.579623] env[61273]: DEBUG oslo_concurrency.lockutils [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] Acquired lock "refresh_cache-d4a2025d-c128-45a2-b74c-a7fd2630d615" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.579830] env[61273]: DEBUG nova.network.neutron [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Refreshing network info cache for port 39750689-9293-4d1a-b24b-04470262e9e6 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 902.579830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] Expecting reply to msg e6dd32ec1cc34fc883b18a8add6cbb45 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 902.580689] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6589214869194f2daf0097551bd0b3b9 [ 902.586318] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6dd32ec1cc34fc883b18a8add6cbb45 [ 902.770776] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Releasing lock "refresh_cache-f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.771217] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 902.771404] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 902.771772] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ddd6c59-dbec-47f7-afa2-652e264ba995 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.780508] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49da9656-f3a2-49ce-b1b6-4e44684ca872 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.802209] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f9e23014-2fe0-4aab-b03c-8759dc1e5eb0 could not be found. [ 902.802423] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 902.802621] env[61273]: INFO nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 902.803528] env[61273]: DEBUG oslo.service.loopingcall [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 902.803528] env[61273]: DEBUG nova.compute.manager [-] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 902.803528] env[61273]: DEBUG nova.network.neutron [-] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 902.818591] env[61273]: DEBUG nova.network.neutron [-] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Instance cache missing network info. 
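
The WARNING 'Instance does not exist on backend' shows the destroy path tolerating an instance that never reached the hypervisor (the build failed before a VM was created): the driver-level delete is treated as already done and the flow moves straight on to deallocating the network. A condensed sketch of that tolerant-destroy shape; InstanceNotFound and the helper parameters here are stand-ins, not the vmwareapi driver's code:

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    pass


def destroy_instance(driver, network_api, context, instance):
    """Destroy on the hypervisor, then always release network resources."""
    try:
        driver.destroy(context, instance)
    except InstanceNotFound:
        # Nothing was ever created on the backend; treat it as destroyed.
        LOG.warning("Instance does not exist on backend: %s",
                    instance['uuid'])
    # Network deallocation runs regardless, so a failed build does not
    # leak Neutron ports.
    network_api.deallocate_for_instance(context, instance)
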
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 902.819096] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ced397c36bdb456e9ad21705156e6765 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 902.826975] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ced397c36bdb456e9ad21705156e6765 [ 902.831778] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 902.858547] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 902.858794] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 902.858954] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.859130] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 902.859270] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.859413] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 902.859607] env[61273]: DEBUG nova.virt.hardware [None 
req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 902.859765] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 902.859929] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 902.860103] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 902.860276] env[61273]: DEBUG nova.virt.hardware [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 902.861196] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788a94e7-53b8-4c1d-9608-31c828930f4d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.868581] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f1fb7b-7fe9-456b-84fe-9cfe944e8670 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.881836] env[61273]: ERROR nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 39750689-9293-4d1a-b24b-04470262e9e6, please check neutron logs for more information. 
[ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Traceback (most recent call last): [ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] yield resources [ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self.driver.spawn(context, instance, image_meta, [ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self._vmops.spawn(context, instance, image_meta, injected_files, [ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] vm_ref = self.build_virtual_machine(instance, [ 902.881836] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] vif_infos = vmwarevif.get_vif_info(self._session, [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] for vif in network_info: [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] return self._sync_wrapper(fn, *args, **kwargs) [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self.wait() [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self[:] = self._gt.wait() [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] return self._exit_event.wait() [ 902.882185] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 902.882185] env[61273]: ERROR 
nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] current.throw(*self._exc) [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] result = function(*args, **kwargs) [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] return func(*args, **kwargs) [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] raise e [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] nwinfo = self.network_api.allocate_for_instance( [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] created_port_ids = self._update_ports_for_instance( [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] with excutils.save_and_reraise_exception(): [ 902.882522] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self.force_reraise() [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] raise self.value [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] updated_port = self._update_port( [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] _ensure_no_port_binding_failure(port) [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] raise exception.PortBindingFailed(port_id=port['id']) [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] nova.exception.PortBindingFailed: Binding failed for port 39750689-9293-4d1a-b24b-04470262e9e6, please check neutron logs for more information. [ 902.882897] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] [ 902.882897] env[61273]: INFO nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Terminating instance [ 902.883980] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "refresh_cache-d4a2025d-c128-45a2-b74c-a7fd2630d615" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.066202] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.066803] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 903.068816] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg a66ac626852c41118d97973ecb959917 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 903.070516] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.002s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.072792] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg d1f0ae84367f4fe99852545df9f0c769 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 903.099148] env[61273]: DEBUG nova.network.neutron [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Instance cache missing network info. 
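
Both failed builds end the same way: the instance is terminated and the resource claim taken earlier under the compute_resources lock is handed back (the abort_instance_claim acquisition above). A compact sketch of that claim/abort bracket, assuming a hypothetical resource-tracker object whose instance_claim() returns a claim with an abort() method:

def build_instance(resource_tracker, context, instance, spawn):
    """Claim resources, spawn, and return the claim on any failure."""
    claim = resource_tracker.instance_claim(context, instance)
    try:
        spawn(context, instance)
    except Exception:
        # Give CPU/RAM/disk back to the tracker so the next build can use
        # them; this is what surfaces as abort_instance_claim in the log.
        claim.abort()
        raise
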
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 903.108050] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a66ac626852c41118d97973ecb959917 [ 903.108772] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1f0ae84367f4fe99852545df9f0c769 [ 903.173275] env[61273]: DEBUG nova.network.neutron [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.173861] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] Expecting reply to msg 6fec84ea140541d4970ebaac00c644cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 903.181807] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fec84ea140541d4970ebaac00c644cc [ 903.321334] env[61273]: DEBUG nova.network.neutron [-] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.321813] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 08ce339e0aad439c8ec7b52934fff102 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 903.330264] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08ce339e0aad439c8ec7b52934fff102 [ 903.572258] env[61273]: DEBUG nova.compute.utils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 903.572899] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg f30a87a8e91e4bbaa0fa4e22d1be0e5d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 903.573829] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Allocating IP information in the background. 
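
The 'Expecting reply to msg <id> in queue reply_...' / 'Received RPC response for msg <id>' pairs scattered through the log are oslo.messaging's call/response correlation: every RPC call gets a unique message id, the caller listens on a shared reply queue, and incoming replies are matched back to waiters by that id. A minimal in-memory sketch of the correlation idea, not the AMQP driver itself:

import queue
import threading
import uuid


class ReplyWaiter:
    """Match replies to outstanding calls by message id."""

    def __init__(self):
        self._waiters = {}
        self._lock = threading.Lock()

    def call(self, send, payload, timeout=60):
        msg_id = uuid.uuid4().hex
        box = queue.Queue(maxsize=1)
        with self._lock:
            self._waiters[msg_id] = box
        send({'msg_id': msg_id, 'payload': payload})   # e.g. publish to AMQP
        try:
            return box.get(timeout=timeout)            # raises queue.Empty on timeout
        finally:
            with self._lock:
                self._waiters.pop(msg_id, None)

    def incoming(self, reply):
        """Called by the reply-queue consumer for every message received."""
        box = self._waiters.get(reply['msg_id'])
        if box is not None:
            box.put(reply['result'])
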
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 903.574009] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 903.582633] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f30a87a8e91e4bbaa0fa4e22d1be0e5d [ 903.617228] env[61273]: DEBUG nova.policy [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c471664da5894985bf7478057ea19b73', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3de421e0f994df8b809ce0096753f23', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 903.676210] env[61273]: DEBUG oslo_concurrency.lockutils [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] Releasing lock "refresh_cache-d4a2025d-c128-45a2-b74c-a7fd2630d615" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.676532] env[61273]: DEBUG nova.compute.manager [req-4d94a5d0-d4f9-4404-b3b4-10c66312986f req-24d04b0d-f7d2-4a44-88d7-9768ae5cba63 service nova] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Received event network-vif-deleted-39750689-9293-4d1a-b24b-04470262e9e6 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 903.676914] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquired lock "refresh_cache-d4a2025d-c128-45a2-b74c-a7fd2630d615" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.677124] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 903.677611] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 76bdac9ef98f4169bbf63c52c1363f47 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 903.685394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76bdac9ef98f4169bbf63c52c1363f47 [ 903.718716] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ef7535-7f9b-4b1b-ac38-b3cd4ef90ea3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.726900] env[61273]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517959f0-1d92-4af5-bfed-a17af293f40e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.757834] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4edf43-8efa-4fca-8075-fefffbb5209f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.765034] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fd980a-c699-4673-8914-18151673af8f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.777741] env[61273]: DEBUG nova.compute.provider_tree [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.778228] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg af47203363dd446582278496382c349e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 903.785483] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af47203363dd446582278496382c349e [ 903.823849] env[61273]: INFO nova.compute.manager [-] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Took 1.02 seconds to deallocate network for instance. [ 903.826575] env[61273]: DEBUG nova.compute.claims [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 903.826820] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.870358] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Successfully created port: 9ea1dfca-8560-4521-8e33-50b5c0f74903 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 904.077182] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Start building block device mappings for instance. 
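
'Successfully created port' followed shortly by a binding failure reflects the two-step flow visible in the tracebacks: the port is first created with minimal attributes, then updated with the compute host so Neutron can bind it, and it is that second step which produces PortBindingFailed. A schematic sketch of the two steps against a hypothetical Neutron client; illustrative, not nova.network.neutron:

def allocate_port(neutron, network_id, device_id, host):
    """Create a bare port, then bind it to the target compute host."""
    # Step 1: minimal create, no binding yet; this corresponds to the
    # "Successfully created port" line.
    port = neutron.create_port(
        {'port': {'network_id': network_id, 'device_id': device_id}})['port']

    # Step 2: hand the port to the host; Neutron's mechanism drivers try to
    # bind it, and a failure shows up as binding:vif_type = 'binding_failed'.
    port = neutron.update_port(
        port['id'], {'port': {'binding:host_id': host}})['port']

    if port.get('binding:vif_type') == 'binding_failed':
        raise RuntimeError(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port['id'])
    return port
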
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 904.078909] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 3d39ad28b4504865a5637cee4e5589c9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 904.121959] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d39ad28b4504865a5637cee4e5589c9 [ 904.198156] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 904.271120] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.271652] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg c48a877b6882411283564a4d5a1b7d31 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 904.281209] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c48a877b6882411283564a4d5a1b7d31 [ 904.285888] env[61273]: DEBUG nova.scheduler.client.report [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.288680] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 47d5614998c44abf84f70f20ec36721a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 904.302380] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47d5614998c44abf84f70f20ec36721a [ 904.583628] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 71292983137a4843a26a3971e363acb4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 904.603479] env[61273]: DEBUG nova.compute.manager [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 service nova] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Received event network-changed-9ea1dfca-8560-4521-8e33-50b5c0f74903 {{(pid=61273) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11129}} [ 904.603479] env[61273]: DEBUG nova.compute.manager [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 service nova] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Refreshing instance network info cache due to event network-changed-9ea1dfca-8560-4521-8e33-50b5c0f74903. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 904.603638] env[61273]: DEBUG oslo_concurrency.lockutils [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 service nova] Acquiring lock "refresh_cache-1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.603782] env[61273]: DEBUG oslo_concurrency.lockutils [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 service nova] Acquired lock "refresh_cache-1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.603944] env[61273]: DEBUG nova.network.neutron [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 service nova] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Refreshing network info cache for port 9ea1dfca-8560-4521-8e33-50b5c0f74903 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 904.604383] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 service nova] Expecting reply to msg fbd44e59926a4e2191608d576ea7b572 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 904.616037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbd44e59926a4e2191608d576ea7b572 [ 904.616513] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71292983137a4843a26a3971e363acb4 [ 904.630781] env[61273]: ERROR nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9ea1dfca-8560-4521-8e33-50b5c0f74903, please check neutron logs for more information. 
[ 904.630781] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 904.630781] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 904.630781] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 904.630781] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 904.630781] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 904.630781] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 904.630781] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 904.630781] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 904.630781] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 904.630781] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 904.630781] env[61273]: ERROR nova.compute.manager raise self.value [ 904.630781] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 904.630781] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 904.630781] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 904.630781] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 904.631244] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 904.631244] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 904.631244] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9ea1dfca-8560-4521-8e33-50b5c0f74903, please check neutron logs for more information. 
[ 904.631244] env[61273]: ERROR nova.compute.manager [ 904.631244] env[61273]: Traceback (most recent call last): [ 904.631244] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 904.631244] env[61273]: listener.cb(fileno) [ 904.631244] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 904.631244] env[61273]: result = function(*args, **kwargs) [ 904.631244] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 904.631244] env[61273]: return func(*args, **kwargs) [ 904.631244] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 904.631244] env[61273]: raise e [ 904.631244] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 904.631244] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 904.631244] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 904.631244] env[61273]: created_port_ids = self._update_ports_for_instance( [ 904.631244] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 904.631244] env[61273]: with excutils.save_and_reraise_exception(): [ 904.631244] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 904.631244] env[61273]: self.force_reraise() [ 904.631244] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 904.631244] env[61273]: raise self.value [ 904.631244] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 904.631244] env[61273]: updated_port = self._update_port( [ 904.631244] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 904.631244] env[61273]: _ensure_no_port_binding_failure(port) [ 904.631244] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 904.631244] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 904.632174] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 9ea1dfca-8560-4521-8e33-50b5c0f74903, please check neutron logs for more information. [ 904.632174] env[61273]: Removing descriptor: 15 [ 904.773621] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Releasing lock "refresh_cache-d4a2025d-c128-45a2-b74c-a7fd2630d615" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.774044] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 904.774242] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 904.774581] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88af211e-36a2-40b7-975e-9de7ec828b9f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.783599] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2b7531-02e6-4a05-8d3f-3f6cafdcb324 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.793525] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.723s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.794167] env[61273]: ERROR nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac, please check neutron logs for more information. 
[ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Traceback (most recent call last): [ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self.driver.spawn(context, instance, image_meta, [ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self._vmops.spawn(context, instance, image_meta, injected_files, [ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] vm_ref = self.build_virtual_machine(instance, [ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] vif_infos = vmwarevif.get_vif_info(self._session, [ 904.794167] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] for vif in network_info: [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] return self._sync_wrapper(fn, *args, **kwargs) [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self.wait() [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self[:] = self._gt.wait() [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] return self._exit_event.wait() [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] result = hub.switch() [ 904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
904.794528] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] return self.greenlet.switch() [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] result = function(*args, **kwargs) [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] return func(*args, **kwargs) [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] raise e [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] nwinfo = self.network_api.allocate_for_instance( [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] created_port_ids = self._update_ports_for_instance( [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] with excutils.save_and_reraise_exception(): [ 904.794920] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] self.force_reraise() [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] raise self.value [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] updated_port = self._update_port( [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] _ensure_no_port_binding_failure(port) [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] raise exception.PortBindingFailed(port_id=port['id']) [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] nova.exception.PortBindingFailed: Binding failed for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac, please check neutron logs for more information. [ 904.795307] env[61273]: ERROR nova.compute.manager [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] [ 904.795621] env[61273]: DEBUG nova.compute.utils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Binding failed for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 904.795998] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.286s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.797654] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 115bf4fdfedf4b2f9cc7bbdc3c9040ea in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 904.799154] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Build of instance 9adae455-b609-4ecb-8841-43fb4d826f84 was re-scheduled: Binding failed for port 4bd4596c-8a2e-47f5-9a3c-6664f1a4b1ac, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 904.799560] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 904.799834] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquiring lock "refresh_cache-9adae455-b609-4ecb-8841-43fb4d826f84" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.799921] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Acquired lock "refresh_cache-9adae455-b609-4ecb-8841-43fb4d826f84" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.800065] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 904.800416] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 92911082e07149ef8f0d2e8e8e13710b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 904.809154] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d4a2025d-c128-45a2-b74c-a7fd2630d615 could not be found. [ 904.809346] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 904.809514] env[61273]: INFO nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Took 0.04 seconds to destroy the instance on the hypervisor. [ 904.809747] env[61273]: DEBUG oslo.service.loopingcall [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.810204] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92911082e07149ef8f0d2e8e8e13710b [ 904.810548] env[61273]: DEBUG nova.compute.manager [-] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 904.810643] env[61273]: DEBUG nova.network.neutron [-] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 904.826615] env[61273]: DEBUG nova.network.neutron [-] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 904.827133] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f4e0f12c96cc49fd8c216582f641e733 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 904.831230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 115bf4fdfedf4b2f9cc7bbdc3c9040ea [ 904.836039] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4e0f12c96cc49fd8c216582f641e733 [ 905.087420] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 905.114604] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 905.114842] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 905.115000] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 905.115178] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Flavor pref 0:0:0 
{{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 905.115319] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 905.115463] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 905.115665] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 905.115821] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 905.115982] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 905.116157] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 905.116326] env[61273]: DEBUG nova.virt.hardware [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 905.117164] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37f788b-4b96-4e7e-96c6-11fe478c9d12 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.124935] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1581b360-5a2d-44bd-a97e-3c084ab7caff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.138328] env[61273]: ERROR nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9ea1dfca-8560-4521-8e33-50b5c0f74903, please check neutron logs for more information. 
[ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Traceback (most recent call last): [ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] yield resources [ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self.driver.spawn(context, instance, image_meta, [ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] vm_ref = self.build_virtual_machine(instance, [ 905.138328] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] vif_infos = vmwarevif.get_vif_info(self._session, [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] for vif in network_info: [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] return self._sync_wrapper(fn, *args, **kwargs) [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self.wait() [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self[:] = self._gt.wait() [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] return self._exit_event.wait() [ 905.138863] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 905.138863] env[61273]: ERROR 
nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] current.throw(*self._exc) [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] result = function(*args, **kwargs) [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] return func(*args, **kwargs) [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] raise e [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] nwinfo = self.network_api.allocate_for_instance( [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] created_port_ids = self._update_ports_for_instance( [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] with excutils.save_and_reraise_exception(): [ 905.139402] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self.force_reraise() [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] raise self.value [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] updated_port = self._update_port( [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] _ensure_no_port_binding_failure(port) [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] raise exception.PortBindingFailed(port_id=port['id']) [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] nova.exception.PortBindingFailed: Binding failed for port 9ea1dfca-8560-4521-8e33-50b5c0f74903, please check neutron logs for more information. [ 905.139997] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] [ 905.139997] env[61273]: INFO nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Terminating instance [ 905.140513] env[61273]: DEBUG nova.network.neutron [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 service nova] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 905.142289] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "refresh_cache-1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.237107] env[61273]: DEBUG nova.network.neutron [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 service nova] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.237640] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 service nova] Expecting reply to msg 9d1fd73345b04d4fbe64da1aee5e9132 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 905.246084] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d1fd73345b04d4fbe64da1aee5e9132 [ 905.317086] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 905.329089] env[61273]: DEBUG nova.network.neutron [-] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.329500] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f7335f3c3b3f43f59b4e43ca1d01f9e8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 905.337426] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7335f3c3b3f43f59b4e43ca1d01f9e8 [ 905.391828] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.392267] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg e44b31e892d4472f8dad5d2132e13703 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 905.400278] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e44b31e892d4472f8dad5d2132e13703 [ 905.434491] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6beaa83-42c0-4a50-ad9a-10dbcf07511f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.441954] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e9b672-17ad-4cb4-90a2-4bd69526829a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.471258] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c9e161-432d-49a1-83d3-780fc6e558d3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.478348] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d084f1b-9718-4732-8244-0ef54d408cfa {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.492293] env[61273]: DEBUG nova.compute.provider_tree [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.492817] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 21ca360c02734c2aba048f0a5f0316ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 905.499659] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21ca360c02734c2aba048f0a5f0316ba [ 905.739753] env[61273]: DEBUG oslo_concurrency.lockutils [req-ddb5c51e-9221-48b2-939a-e5bf1eaeac7b req-a73514fc-6965-4b2e-99c9-e4f4ab68a869 
service nova] Releasing lock "refresh_cache-1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.740250] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquired lock "refresh_cache-1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.740436] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 905.740871] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg e21dd1c8d74940d0bd515c795e196d1e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 905.747899] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e21dd1c8d74940d0bd515c795e196d1e [ 905.831346] env[61273]: INFO nova.compute.manager [-] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Took 1.02 seconds to deallocate network for instance. [ 905.833769] env[61273]: DEBUG nova.compute.claims [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 905.833929] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.894271] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Releasing lock "refresh_cache-9adae455-b609-4ecb-8841-43fb4d826f84" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.894521] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 905.894697] env[61273]: DEBUG nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 905.894865] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 905.910082] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 905.910644] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 8d5f2c24d4494e3ead2337f33b683830 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 905.922627] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d5f2c24d4494e3ead2337f33b683830 [ 905.995738] env[61273]: DEBUG nova.scheduler.client.report [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 905.998628] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 306b26d17e7044a4847cbb87d1c6a531 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 906.010241] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 306b26d17e7044a4847cbb87d1c6a531 [ 906.258041] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 906.319836] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.320413] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 797ecf24daa847f4adf49708e3be2f78 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 906.328305] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 797ecf24daa847f4adf49708e3be2f78 [ 906.413024] env[61273]: DEBUG nova.network.neutron [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.413665] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 17cfa095637749319d07072d8f78c558 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 906.421832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17cfa095637749319d07072d8f78c558 [ 906.504934] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.709s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.505556] env[61273]: ERROR nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82, please check neutron logs for more information. 
[ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Traceback (most recent call last): [ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self.driver.spawn(context, instance, image_meta, [ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] vm_ref = self.build_virtual_machine(instance, [ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] vif_infos = vmwarevif.get_vif_info(self._session, [ 906.505556] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] for vif in network_info: [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] return self._sync_wrapper(fn, *args, **kwargs) [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self.wait() [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self[:] = self._gt.wait() [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] return self._exit_event.wait() [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] result = hub.switch() [ 906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
906.506084] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] return self.greenlet.switch() [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] result = function(*args, **kwargs) [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] return func(*args, **kwargs) [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] raise e [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] nwinfo = self.network_api.allocate_for_instance( [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] created_port_ids = self._update_ports_for_instance( [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] with excutils.save_and_reraise_exception(): [ 906.506665] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] self.force_reraise() [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] raise self.value [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] updated_port = self._update_port( [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] _ensure_no_port_binding_failure(port) [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] raise exception.PortBindingFailed(port_id=port['id']) [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] nova.exception.PortBindingFailed: Binding failed for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82, please check neutron logs for more information. [ 906.507479] env[61273]: ERROR nova.compute.manager [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] [ 906.507986] env[61273]: DEBUG nova.compute.utils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Binding failed for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 906.507986] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.379s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.509272] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 545dbaf8bd11437a8a70f12c377a3ec4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 906.510371] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Build of instance 9cedc314-173e-4686-8ee5-28c2512dbcba was re-scheduled: Binding failed for port bfc6fb0b-0208-45a8-ba4f-77ee0a986c82, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 906.510785] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 906.511009] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-9cedc314-173e-4686-8ee5-28c2512dbcba" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.511156] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-9cedc314-173e-4686-8ee5-28c2512dbcba" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.511316] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 906.511674] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 8f31a1e5fa544884a0cce059b3e86d7f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 906.517694] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f31a1e5fa544884a0cce059b3e86d7f [ 906.541776] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 545dbaf8bd11437a8a70f12c377a3ec4 [ 906.633967] env[61273]: DEBUG nova.compute.manager [req-1452cf4b-946d-49c3-ac00-a985839aee2f req-fb0ef8ca-635c-47ba-aae3-c918204ce20a service nova] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Received event network-vif-deleted-9ea1dfca-8560-4521-8e33-50b5c0f74903 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 906.822902] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Releasing lock "refresh_cache-1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.823347] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 906.823545] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 906.823850] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-daa6f83d-c572-4e6e-a59a-14d282e98289 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.833565] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d74dcc-0717-4eb9-a662-8f9a532d6ad7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.853906] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5 could not be found. [ 906.854172] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 906.854365] env[61273]: INFO nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Took 0.03 seconds to destroy the instance on the hypervisor. [ 906.854601] env[61273]: DEBUG oslo.service.loopingcall [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.854817] env[61273]: DEBUG nova.compute.manager [-] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 906.854913] env[61273]: DEBUG nova.network.neutron [-] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 906.868839] env[61273]: DEBUG nova.network.neutron [-] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 906.869284] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c2e559c0b64147208cee563173b1d468 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 906.876319] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2e559c0b64147208cee563173b1d468 [ 906.915220] env[61273]: INFO nova.compute.manager [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] [instance: 9adae455-b609-4ecb-8841-43fb4d826f84] Took 1.02 seconds to deallocate network for instance. [ 906.916900] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 15033eb280824f5d980d4708b272c814 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 906.948033] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15033eb280824f5d980d4708b272c814 [ 907.033452] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 907.096880] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.097408] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg f170d1b5d0c64a9cabf4d8fd5943a600 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 907.107662] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f170d1b5d0c64a9cabf4d8fd5943a600 [ 907.157787] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edde0262-1c01-4ab8-83c3-f6efb1088b60 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.164535] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801ae3f6-3e73-45a6-b0c3-e317257ab79d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.194861] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f4e8dc-a16f-4396-9ef9-189ff7be161a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.201529] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e974f4f1-241e-4fb9-81f8-88658fc29434 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.214027] env[61273]: DEBUG nova.compute.provider_tree [None 
req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.214487] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 9f3870ca9a2e4a6fbeaceb69e3c39d2b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 907.222581] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f3870ca9a2e4a6fbeaceb69e3c39d2b [ 907.371220] env[61273]: DEBUG nova.network.neutron [-] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.371650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 71602cdb65664708920b4b55acc7b06c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 907.379341] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71602cdb65664708920b4b55acc7b06c [ 907.420794] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg b986a2de78f64337afa7fb34a9156154 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 907.452801] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b986a2de78f64337afa7fb34a9156154 [ 907.599675] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-9cedc314-173e-4686-8ee5-28c2512dbcba" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.599939] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 907.600098] env[61273]: DEBUG nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 907.600314] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 907.615479] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 907.615997] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg bfe2899990fa42cd8f4246c6d675a13e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 907.623129] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfe2899990fa42cd8f4246c6d675a13e [ 907.716673] env[61273]: DEBUG nova.scheduler.client.report [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 907.719016] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 33753c7610df491fb90c9feb66efb67c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 907.729997] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33753c7610df491fb90c9feb66efb67c [ 907.873789] env[61273]: INFO nova.compute.manager [-] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Took 1.02 seconds to deallocate network for instance. 
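The scheduler report entries above keep repeating the same provider inventory. As a reading aid, the following minimal Python sketch (not part of the log; the figures are copied from the "inventory data" entry above) derives the capacity Placement would expose, assuming the usual (total - reserved) * allocation_ratio rule per resource class.

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def schedulable(inv):
        # Capacity available for allocation per resource class, assuming the
        # standard (total - reserved) * allocation_ratio interpretation.
        return {rc: (spec['total'] - spec['reserved']) * spec['allocation_ratio']
                for rc, spec in inv.items()}

    print(schedulable(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

So the "Inventory has not changed" entries describe a provider with 192 schedulable vCPUs, roughly 196 GB of allocatable RAM and 400 GB of disk.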
[ 907.876128] env[61273]: DEBUG nova.compute.claims [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 907.876300] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.945155] env[61273]: INFO nova.scheduler.client.report [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Deleted allocations for instance 9adae455-b609-4ecb-8841-43fb4d826f84 [ 907.951110] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Expecting reply to msg 2d9de83a206b41e6bbb264ab9463a1d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 907.963436] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d9de83a206b41e6bbb264ab9463a1d3 [ 908.118936] env[61273]: DEBUG nova.network.neutron [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.119444] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 59f18e7527444277a36156e5ebd1f45d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 908.128145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59f18e7527444277a36156e5ebd1f45d [ 908.222086] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.715s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.222767] env[61273]: ERROR nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 228ecc96-89ef-4a4e-8571-06b5011021b9, please check neutron logs for more information. 
[ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Traceback (most recent call last): [ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self.driver.spawn(context, instance, image_meta, [ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self._vmops.spawn(context, instance, image_meta, injected_files, [ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] vm_ref = self.build_virtual_machine(instance, [ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] vif_infos = vmwarevif.get_vif_info(self._session, [ 908.222767] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] for vif in network_info: [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] return self._sync_wrapper(fn, *args, **kwargs) [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self.wait() [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self[:] = self._gt.wait() [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] return self._exit_event.wait() [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] current.throw(*self._exc) [ 908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
908.223081] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] result = function(*args, **kwargs) [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] return func(*args, **kwargs) [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] raise e [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] nwinfo = self.network_api.allocate_for_instance( [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] created_port_ids = self._update_ports_for_instance( [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] with excutils.save_and_reraise_exception(): [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] self.force_reraise() [ 908.223461] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 908.223824] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] raise self.value [ 908.223824] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 908.223824] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] updated_port = self._update_port( [ 908.223824] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 908.223824] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] _ensure_no_port_binding_failure(port) [ 908.223824] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 908.223824] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] raise exception.PortBindingFailed(port_id=port['id']) [ 908.223824] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] nova.exception.PortBindingFailed: Binding failed for 
port 228ecc96-89ef-4a4e-8571-06b5011021b9, please check neutron logs for more information. [ 908.223824] env[61273]: ERROR nova.compute.manager [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] [ 908.223824] env[61273]: DEBUG nova.compute.utils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Binding failed for port 228ecc96-89ef-4a4e-8571-06b5011021b9, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 908.224701] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.686s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.227146] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 61064bb1377d4f69ad966585d91d3ac6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 908.228676] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Build of instance 31ab5ebd-3df1-4e9f-bf53-69d47176da01 was re-scheduled: Binding failed for port 228ecc96-89ef-4a4e-8571-06b5011021b9, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 908.229207] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 908.229439] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquiring lock "refresh_cache-31ab5ebd-3df1-4e9f-bf53-69d47176da01" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.229588] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Acquired lock "refresh_cache-31ab5ebd-3df1-4e9f-bf53-69d47176da01" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.229767] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 908.230268] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg e789b293e75b41b1822b05caea2d6d50 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 908.236543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e789b293e75b41b1822b05caea2d6d50 [ 908.265803] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61064bb1377d4f69ad966585d91d3ac6 [ 908.452797] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b068d399-cc70-41c3-a78d-284739a9238a tempest-ServersTestMultiNic-1091074743 tempest-ServersTestMultiNic-1091074743-project-member] Lock "9adae455-b609-4ecb-8841-43fb4d826f84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.588s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.453454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg b6444a4ca25d4eae88f258650ed8b81c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 908.462901] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6444a4ca25d4eae88f258650ed8b81c [ 908.621580] env[61273]: INFO nova.compute.manager [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 9cedc314-173e-4686-8ee5-28c2512dbcba] Took 1.02 seconds to deallocate network for instance. 
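The "Acquiring lock ... by ...", "acquired ... waited N.NNNs" and ""released" ... held N.NNNs" lines throughout this section are emitted by oslo.concurrency's lock wrapper (the lockutils.py:402/407/421 frames shown in each entry). A minimal sketch of that pattern follows, assuming oslo.concurrency is installed; the function name and lock name are illustrative, not taken from the Nova source.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_claim_example(instance_uuid):
        # While one thread holds the lock, contenders block on entry; when they
        # finally acquire it they log the "waited N.NNNs" figure seen above
        # (13.379s, 12.686s, ...), and on exit the "held N.NNNs" figure.
        print('aborted claim for %s while holding the lock' % instance_uuid)

    abort_claim_example('31ab5ebd-3df1-4e9f-bf53-69d47176da01')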
[ 908.623336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg ab15d6851bd84c589e2f22ad707a1878 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 908.654045] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab15d6851bd84c589e2f22ad707a1878 [ 908.756852] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 908.867374] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.867907] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 7a4f168eb3764ac5be22dee77de9c7e0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 908.875994] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a4f168eb3764ac5be22dee77de9c7e0 [ 908.877947] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ece3037-c67b-41c0-a71a-6240dc644a40 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.886375] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b830159-29bb-4cc1-98f6-65f54899b1c9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.919740] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52048703-b1dd-4097-832e-eb6971e1997c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.929324] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fc8e26-ac1c-4860-8d5f-f92a0b8aecb6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.942873] env[61273]: DEBUG nova.compute.provider_tree [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.943467] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 27d949269a984bb59d40d7b656a5c944 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 908.952373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 27d949269a984bb59d40d7b656a5c944 [ 908.955202] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 908.956939] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 90e397c6fa1242fcacabd64bdfcd6fac in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 908.990864] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90e397c6fa1242fcacabd64bdfcd6fac [ 909.131077] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg c3ea825840744630b1068fecdeeebdc6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 909.159822] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3ea825840744630b1068fecdeeebdc6 [ 909.370211] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Releasing lock "refresh_cache-31ab5ebd-3df1-4e9f-bf53-69d47176da01" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.370513] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 909.370737] env[61273]: DEBUG nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 909.370910] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 909.386065] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 909.386589] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg dcc76e7ff4a940ebb5b70069b100787e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 909.395105] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcc76e7ff4a940ebb5b70069b100787e [ 909.446220] env[61273]: DEBUG nova.scheduler.client.report [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.448683] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg ac67b1f0b3ae45cb9523930e46abcabc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 909.464108] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac67b1f0b3ae45cb9523930e46abcabc [ 909.478993] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.656647] env[61273]: INFO nova.scheduler.client.report [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Deleted allocations for instance 9cedc314-173e-4686-8ee5-28c2512dbcba [ 909.662471] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg df9b6fe2c06d446f8d3d81b7ae54d9e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 909.680545] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df9b6fe2c06d446f8d3d81b7ae54d9e3 [ 909.888557] env[61273]: DEBUG nova.network.neutron [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.889465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 
55a1b27ae9d7400c96ff04571e2db513 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 909.898377] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55a1b27ae9d7400c96ff04571e2db513 [ 909.951414] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.727s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.952118] env[61273]: ERROR nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7a55ee69-0884-433c-9e35-dd184ba21c20, please check neutron logs for more information. [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Traceback (most recent call last): [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self.driver.spawn(context, instance, image_meta, [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] vm_ref = self.build_virtual_machine(instance, [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] vif_infos = vmwarevif.get_vif_info(self._session, [ 909.952118] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] for vif in network_info: [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] return self._sync_wrapper(fn, *args, **kwargs) [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self.wait() [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 
7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self[:] = self._gt.wait() [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] return self._exit_event.wait() [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] current.throw(*self._exc) [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 909.952516] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] result = function(*args, **kwargs) [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] return func(*args, **kwargs) [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] raise e [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] nwinfo = self.network_api.allocate_for_instance( [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] created_port_ids = self._update_ports_for_instance( [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] with excutils.save_and_reraise_exception(): [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] self.force_reraise() [ 909.952887] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 909.953245] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] raise self.value [ 909.953245] env[61273]: ERROR nova.compute.manager 
[instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 909.953245] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] updated_port = self._update_port( [ 909.953245] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 909.953245] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] _ensure_no_port_binding_failure(port) [ 909.953245] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 909.953245] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] raise exception.PortBindingFailed(port_id=port['id']) [ 909.953245] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] nova.exception.PortBindingFailed: Binding failed for port 7a55ee69-0884-433c-9e35-dd184ba21c20, please check neutron logs for more information. [ 909.953245] env[61273]: ERROR nova.compute.manager [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] [ 909.953245] env[61273]: DEBUG nova.compute.utils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Binding failed for port 7a55ee69-0884-433c-9e35-dd184ba21c20, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 909.954900] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.881s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.956233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 9963836ab91b4eae8b2dfcf5de556707 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 909.957330] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Build of instance 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef was re-scheduled: Binding failed for port 7a55ee69-0884-433c-9e35-dd184ba21c20, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 909.957743] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 909.957955] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Acquiring lock "refresh_cache-7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.958694] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Acquired lock "refresh_cache-7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.958694] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 909.958694] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 1efec5bf64974230a312fb784c15c029 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 909.965634] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1efec5bf64974230a312fb784c15c029 [ 909.991284] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9963836ab91b4eae8b2dfcf5de556707 [ 910.164629] env[61273]: DEBUG oslo_concurrency.lockutils [None req-6aa7edfa-ebcc-4f5b-a570-60fdb481dc84 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "9cedc314-173e-4686-8ee5-28c2512dbcba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.771s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.392377] env[61273]: INFO nova.compute.manager [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] [instance: 31ab5ebd-3df1-4e9f-bf53-69d47176da01] Took 1.02 seconds to deallocate network for instance. 
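Every build failure in this section ends the same way: _ensure_no_port_binding_failure() at nova/network/neutron.py:294 raises PortBindingFailed, the claim is aborted, and the instance is re-scheduled. The self-contained sketch below approximates that check, assuming Neutron marks a failed binding by setting the port's binding:vif_type to 'binding_failed'; the exception class is a stand-in for nova.exception.PortBindingFailed and the port dict is illustrative (the UUID is copied from the failure above).

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed.
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Approximation of the check the tracebacks above end in: a port whose
        # binding failed on the Neutron side aborts the instance build.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    port = {'id': '7a55ee69-0884-433c-9e35-dd184ba21c20',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

When this fires inside _update_ports_for_instance(), the exception propagates out of _allocate_network_async(), the compute manager aborts the resource claim, deallocates the instance's network, and re-schedules the build, which is exactly the sequence the surrounding entries record.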
[ 910.394276] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg d91c5f9102bb48ae8cade7a95875f16d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 910.431390] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d91c5f9102bb48ae8cade7a95875f16d [ 910.479393] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 910.557589] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.558142] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 5d10a10d8f764dd892a1e00ab0aa733b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 910.569963] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d10a10d8f764dd892a1e00ab0aa733b [ 910.604315] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa3b812-5e46-450e-8fda-25ba5b2569c0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.611889] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c865121-980e-498d-84e4-af438e4a9bcb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.641368] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ab5d73-2147-4f1f-a84e-f75d9f82b797 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.648680] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ad343d-9f17-49b7-bb84-949d584c590f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.663269] env[61273]: DEBUG nova.compute.provider_tree [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.663718] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg d02a48c48bad4e909961e42d854b358e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 910.672823] env[61273]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg d02a48c48bad4e909961e42d854b358e [ 910.899162] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg e3f61608dc9e463ba23dd10c70837479 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 910.949870] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3f61608dc9e463ba23dd10c70837479 [ 911.060128] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Releasing lock "refresh_cache-7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.060396] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 911.060584] env[61273]: DEBUG nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 911.060756] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 911.075860] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 911.076483] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 0b39688911bb49058e7b069ea8d15679 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 911.085394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b39688911bb49058e7b069ea8d15679 [ 911.166606] env[61273]: DEBUG nova.scheduler.client.report [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.172854] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 56f4f780711f452c8dc475ab1eb459a4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 911.195940] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56f4f780711f452c8dc475ab1eb459a4 [ 911.276695] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "d10134a3-6f70-4f00-b810-371e17d2a1ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.276951] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "d10134a3-6f70-4f00-b810-371e17d2a1ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.277495] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 3ad2c88950214754bd08349ae018c0aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 911.286685] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ad2c88950214754bd08349ae018c0aa [ 911.423467] env[61273]: INFO nova.scheduler.client.report [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Deleted allocations for instance 31ab5ebd-3df1-4e9f-bf53-69d47176da01 [ 911.429876] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Expecting reply to msg 061352be777f487d9f122b5ac70173f5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 911.440693] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 061352be777f487d9f122b5ac70173f5 [ 911.579156] env[61273]: DEBUG nova.network.neutron [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.579745] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg e96757b2ac1e45f9828784bfbc4bd4c4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 911.588206] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e96757b2ac1e45f9828784bfbc4bd4c4 [ 911.675712] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.721s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.676175] env[61273]: ERROR nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b746e4c7-581b-4e6f-8b47-cc4c45268a37, please check neutron logs for more information. 
[ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Traceback (most recent call last): [ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self.driver.spawn(context, instance, image_meta, [ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] vm_ref = self.build_virtual_machine(instance, [ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] vif_infos = vmwarevif.get_vif_info(self._session, [ 911.676175] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] for vif in network_info: [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] return self._sync_wrapper(fn, *args, **kwargs) [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self.wait() [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self[:] = self._gt.wait() [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] return self._exit_event.wait() [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] current.throw(*self._exc) [ 911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
911.676592] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] result = function(*args, **kwargs) [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] return func(*args, **kwargs) [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] raise e [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] nwinfo = self.network_api.allocate_for_instance( [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] created_port_ids = self._update_ports_for_instance( [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] with excutils.save_and_reraise_exception(): [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] self.force_reraise() [ 911.676995] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 911.677398] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] raise self.value [ 911.677398] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 911.677398] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] updated_port = self._update_port( [ 911.677398] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 911.677398] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] _ensure_no_port_binding_failure(port) [ 911.677398] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 911.677398] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] raise exception.PortBindingFailed(port_id=port['id']) [ 911.677398] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] nova.exception.PortBindingFailed: Binding failed for 
port b746e4c7-581b-4e6f-8b47-cc4c45268a37, please check neutron logs for more information. [ 911.677398] env[61273]: ERROR nova.compute.manager [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] [ 911.677398] env[61273]: DEBUG nova.compute.utils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Binding failed for port b746e4c7-581b-4e6f-8b47-cc4c45268a37, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 911.678179] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.389s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.680246] env[61273]: INFO nova.compute.claims [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.682050] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 24181d5f8efa4895878f08390516346e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 911.683547] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Build of instance d3dafd33-91f8-481d-8f40-8c2e98a7587d was re-scheduled: Binding failed for port b746e4c7-581b-4e6f-8b47-cc4c45268a37, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 911.684052] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 911.684214] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Acquiring lock "refresh_cache-d3dafd33-91f8-481d-8f40-8c2e98a7587d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.684354] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Acquired lock "refresh_cache-d3dafd33-91f8-481d-8f40-8c2e98a7587d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.684506] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 911.684875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg d6fb6635e8ea4333bae5daad5aa3aefc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 911.691858] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6fb6635e8ea4333bae5daad5aa3aefc [ 911.718685] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24181d5f8efa4895878f08390516346e [ 911.779823] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 911.781587] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 8d477d8823ef4586a36d0dbfc27f775e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 911.813556] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d477d8823ef4586a36d0dbfc27f775e [ 911.932232] env[61273]: DEBUG oslo_concurrency.lockutils [None req-39cc91ef-e014-4e03-89cc-20e30a69e8e7 tempest-AttachVolumeShelveTestJSON-1091491161 tempest-AttachVolumeShelveTestJSON-1091491161-project-member] Lock "31ab5ebd-3df1-4e9f-bf53-69d47176da01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.479s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.083140] env[61273]: INFO nova.compute.manager [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] [instance: 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef] Took 1.02 seconds to deallocate network for instance. [ 912.084695] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 4aaa3f6e9a0c410c957ca87e7e7dc782 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 912.120909] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4aaa3f6e9a0c410c957ca87e7e7dc782 [ 912.188400] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 03e066c4c0fc4af1a25f8e444a69ebf9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 912.200844] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03e066c4c0fc4af1a25f8e444a69ebf9 [ 912.205588] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 912.273745] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.274374] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 5639d3e5e7434f4ab937866022872ad4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 912.282600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5639d3e5e7434f4ab937866022872ad4 [ 912.301288] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.589462] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 041696fecdd84b4889cfefba8c711c47 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 912.619299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 041696fecdd84b4889cfefba8c711c47 [ 912.776668] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Releasing lock "refresh_cache-d3dafd33-91f8-481d-8f40-8c2e98a7587d" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.776983] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 912.777471] env[61273]: DEBUG nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 912.777713] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 912.836640] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa475f5e-0d2c-437e-a628-5d65ca05957b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.846476] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c31877-3a11-4ccd-ae6a-8389652e0f63 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.880905] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306c52e2-da89-4612-be5f-d29ba0a79dd4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.890700] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1bf3f9-115b-4036-a143-7061e50d7c38 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.905153] env[61273]: DEBUG nova.compute.provider_tree [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.905507] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 6bee4fd52d2b40db9a91e1ddec643d74 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 912.912577] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bee4fd52d2b40db9a91e1ddec643d74 [ 912.973367] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 912.974074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 86832789e4234d018acbfcdb5b2ff1c3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 912.980897] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86832789e4234d018acbfcdb5b2ff1c3 [ 913.116910] env[61273]: INFO nova.scheduler.client.report [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Deleted allocations for instance 7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef [ 913.122904] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] Expecting reply to msg 7573fecdd13745058823d405b2b51cd4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 913.134017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7573fecdd13745058823d405b2b51cd4 [ 913.408177] env[61273]: DEBUG nova.scheduler.client.report [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 913.410606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg bac728b0a0fd4fda8d6379618a66a4dd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 913.427433] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bac728b0a0fd4fda8d6379618a66a4dd [ 913.476031] env[61273]: DEBUG nova.network.neutron [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.476501] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg e18eefcbcb204c56bbd9c90d45d269f1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 913.485333] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e18eefcbcb204c56bbd9c90d45d269f1 [ 913.626133] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3fca92b9-ee3c-4d16-8e90-ac9afc5fe25e tempest-ServerPasswordTestJSON-851135744 tempest-ServerPasswordTestJSON-851135744-project-member] 
Lock "7dc5b4e9-15c3-4f47-9587-6d34bd44a5ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.215s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.918668] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.919233] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 913.921072] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg c041f59e4e1b40c39772f777d45bd501 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 913.922236] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.923s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.923690] env[61273]: INFO nova.compute.claims [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.925407] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 00421a51c25244dda477b7b60803f016 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 913.960936] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c041f59e4e1b40c39772f777d45bd501 [ 913.978439] env[61273]: INFO nova.compute.manager [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] [instance: d3dafd33-91f8-481d-8f40-8c2e98a7587d] Took 1.20 seconds to deallocate network for instance. 
[ 913.980160] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 2b249d3add424b11be85ffb38188ecb8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 913.981624] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00421a51c25244dda477b7b60803f016 [ 914.018400] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b249d3add424b11be85ffb38188ecb8 [ 914.346884] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Acquiring lock "2178d195-47f4-47ab-9140-b8f849973434" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.347117] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Lock "2178d195-47f4-47ab-9140-b8f849973434" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.347556] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg 008723e5bceb417089b18c67c055b0b8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 914.356123] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 008723e5bceb417089b18c67c055b0b8 [ 914.432080] env[61273]: DEBUG nova.compute.utils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 914.432080] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 41dd3fd7581c4c18bd4355d34350d0df in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 914.432080] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 914.432080] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 914.434398] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg d7ee37140e1f405f8fcf7bd8b865d36d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 914.439123] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41dd3fd7581c4c18bd4355d34350d0df [ 914.442366] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7ee37140e1f405f8fcf7bd8b865d36d [ 914.485349] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 882c332e53214b4d9c26599e7ebd621c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 914.527139] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 882c332e53214b4d9c26599e7ebd621c [ 914.560416] env[61273]: DEBUG nova.policy [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3078a2af81b248f8b100f58ee66a5a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c06b50a7aaa742afbbd0c6fc56c3d131', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 914.850242] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 914.852047] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg dd1c2945819b443994a8c47c86496851 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 914.917837] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd1c2945819b443994a8c47c86496851 [ 914.934162] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 914.935906] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 6ce3ce34631541769cd709ae6db63372 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 915.011188] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Successfully created port: 25c7c234-2cad-4819-a66e-0c2b3bc4be20 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 915.020499] env[61273]: INFO nova.scheduler.client.report [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Deleted allocations for instance d3dafd33-91f8-481d-8f40-8c2e98a7587d [ 915.026891] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Expecting reply to msg 78a40a48144c4db180596fbd00ce2396 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 915.050446] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ce3ce34631541769cd709ae6db63372 [ 915.072720] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78a40a48144c4db180596fbd00ce2396 [ 915.141136] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfc47ef-4d34-40e3-a7eb-bd138a2e8d00 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.150098] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf68db98-7408-47fa-8b47-c0eb904e641e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.180707] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ff0d8f-620b-486f-bfd2-e34445fcc266 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.188926] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d0efc0-7f5b-4268-87d1-1bd817404f34 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.204836] env[61273]: DEBUG nova.compute.provider_tree [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.205348] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 8f7f66bf247f4a59913b432429c74078 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 915.212440] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
8f7f66bf247f4a59913b432429c74078 [ 915.370396] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.447280] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg efc5e35fd69c470d81ccacb0b02bfbc5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 915.485224] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efc5e35fd69c470d81ccacb0b02bfbc5 [ 915.528449] env[61273]: DEBUG oslo_concurrency.lockutils [None req-db212900-bf5a-4081-a3f7-a68418d30d6e tempest-ImagesOneServerNegativeTestJSON-358604868 tempest-ImagesOneServerNegativeTestJSON-358604868-project-member] Lock "d3dafd33-91f8-481d-8f40-8c2e98a7587d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.545s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.676211] env[61273]: DEBUG nova.compute.manager [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] [instance: a70f220a-fa34-44af-939f-29292b556897] Received event network-changed-25c7c234-2cad-4819-a66e-0c2b3bc4be20 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 915.676211] env[61273]: DEBUG nova.compute.manager [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] [instance: a70f220a-fa34-44af-939f-29292b556897] Refreshing instance network info cache due to event network-changed-25c7c234-2cad-4819-a66e-0c2b3bc4be20. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 915.676211] env[61273]: DEBUG oslo_concurrency.lockutils [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] Acquiring lock "refresh_cache-a70f220a-fa34-44af-939f-29292b556897" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.676211] env[61273]: DEBUG oslo_concurrency.lockutils [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] Acquired lock "refresh_cache-a70f220a-fa34-44af-939f-29292b556897" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.676211] env[61273]: DEBUG nova.network.neutron [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] [instance: a70f220a-fa34-44af-939f-29292b556897] Refreshing network info cache for port 25c7c234-2cad-4819-a66e-0c2b3bc4be20 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 915.676848] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] Expecting reply to msg 80a9738f13fa4c8682f99dbb27c5b92d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 915.686925] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80a9738f13fa4c8682f99dbb27c5b92d [ 915.707666] env[61273]: DEBUG nova.scheduler.client.report [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 915.710424] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 8aa68c20cac64fde9b4cb7ee7b393f5e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 915.727273] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8aa68c20cac64fde9b4cb7ee7b393f5e [ 915.881081] env[61273]: ERROR nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 25c7c234-2cad-4819-a66e-0c2b3bc4be20, please check neutron logs for more information. 
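The traceback records that follow trace this failure into nova/network/neutron.py: _update_ports_for_instance() -> _update_port() -> _ensure_no_port_binding_failure(), which raises PortBindingFailed(port_id=port['id']). A minimal sketch of that kind of check, assuming the conventional Neutron 'binding_failed' vif_type marker (illustrative, not the verbatim Nova source):

    # Illustrative reconstruction of the check named in the traceback below.
    # The exception name and the port_id kwarg come from the log; the
    # 'binding:vif_type' == 'binding_failed' convention is an assumed
    # Neutron API detail, not taken from this log.
    class PortBindingFailed(Exception):
        msg_fmt = ("Binding failed for port %(port_id)s, please check "
                   "neutron logs for more information.")

        def __init__(self, port_id):
            super().__init__(self.msg_fmt % {'port_id': port_id})

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

When Neutron leaves the port with a failed binding (typically because nothing could wire it on this host), Nova surfaces it as the PortBindingFailed seen here, aborts the resource claim, and re-schedules the build, which matches the abort_instance_claim and "was re-scheduled" records earlier in this log.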
[ 915.881081] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 915.881081] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 915.881081] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 915.881081] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 915.881081] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 915.881081] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 915.881081] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 915.881081] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 915.881081] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 915.881081] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 915.881081] env[61273]: ERROR nova.compute.manager raise self.value [ 915.881081] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 915.881081] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 915.881081] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 915.881081] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 915.881856] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 915.881856] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 915.881856] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 25c7c234-2cad-4819-a66e-0c2b3bc4be20, please check neutron logs for more information. 
[ 915.881856] env[61273]: ERROR nova.compute.manager [ 915.881856] env[61273]: Traceback (most recent call last): [ 915.881856] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 915.881856] env[61273]: listener.cb(fileno) [ 915.881856] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 915.881856] env[61273]: result = function(*args, **kwargs) [ 915.881856] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 915.881856] env[61273]: return func(*args, **kwargs) [ 915.881856] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 915.881856] env[61273]: raise e [ 915.881856] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 915.881856] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 915.881856] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 915.881856] env[61273]: created_port_ids = self._update_ports_for_instance( [ 915.881856] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 915.881856] env[61273]: with excutils.save_and_reraise_exception(): [ 915.881856] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 915.881856] env[61273]: self.force_reraise() [ 915.881856] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 915.881856] env[61273]: raise self.value [ 915.881856] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 915.881856] env[61273]: updated_port = self._update_port( [ 915.881856] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 915.881856] env[61273]: _ensure_no_port_binding_failure(port) [ 915.881856] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 915.881856] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 915.882785] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 25c7c234-2cad-4819-a66e-0c2b3bc4be20, please check neutron logs for more information. [ 915.882785] env[61273]: Removing descriptor: 15 [ 915.951398] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 915.975687] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 915.975921] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 915.976097] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.976279] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 915.976422] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.976590] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 915.976830] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 915.976993] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 915.977157] env[61273]: DEBUG nova.virt.hardware [None 
req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 915.977316] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 915.977598] env[61273]: DEBUG nova.virt.hardware [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 915.978523] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7297da-2fec-4d50-8c27-31c396629abb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.986698] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6906106-1ac6-408d-8162-3d7d5edef6bb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.001588] env[61273]: ERROR nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 25c7c234-2cad-4819-a66e-0c2b3bc4be20, please check neutron logs for more information. 
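The traceback that follows walks the same path as the one dumped above: allocate_for_instance() -> _update_ports_for_instance() -> _update_port() -> _ensure_no_port_binding_failure(), with the last call raising PortBindingFailed once Neutron reports the binding as failed. A minimal self-contained sketch of that final check is below; the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions about the Neutron port dict, and the class is illustrative rather than Nova's actual exception.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    # Assumed shape of the check: Neutron marks a port whose binding could
    # not be completed with a failed vif_type, and Nova turns that into a
    # hard error instead of spawning a VM with a dead NIC.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


port = {"id": "25c7c234-2cad-4819-a66e-0c2b3bc4be20",
        "binding:vif_type": "binding_failed"}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # same message that appears throughout this log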
[ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] Traceback (most recent call last): [ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] yield resources [ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self.driver.spawn(context, instance, image_meta, [ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self._vmops.spawn(context, instance, image_meta, injected_files, [ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] vm_ref = self.build_virtual_machine(instance, [ 916.001588] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] vif_infos = vmwarevif.get_vif_info(self._session, [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] for vif in network_info: [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] return self._sync_wrapper(fn, *args, **kwargs) [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self.wait() [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self[:] = self._gt.wait() [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] return self._exit_event.wait() [ 916.002009] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 916.002009] env[61273]: ERROR 
nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] current.throw(*self._exc) [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] result = function(*args, **kwargs) [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] return func(*args, **kwargs) [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] raise e [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] nwinfo = self.network_api.allocate_for_instance( [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] created_port_ids = self._update_ports_for_instance( [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] with excutils.save_and_reraise_exception(): [ 916.002369] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self.force_reraise() [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] raise self.value [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] updated_port = self._update_port( [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] _ensure_no_port_binding_failure(port) [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] raise exception.PortBindingFailed(port_id=port['id']) [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] nova.exception.PortBindingFailed: Binding failed for port 25c7c234-2cad-4819-a66e-0c2b3bc4be20, please check neutron logs for more information. [ 916.002761] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] [ 916.002761] env[61273]: INFO nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Terminating instance [ 916.003832] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-a70f220a-fa34-44af-939f-29292b556897" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.192018] env[61273]: DEBUG nova.network.neutron [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] [instance: a70f220a-fa34-44af-939f-29292b556897] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 916.213105] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.291s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.213556] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 916.215341] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 47e2caab46464e31926c316e65394d9d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 916.216362] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.390s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.218130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg a5ec3bb316ba41ed9771a46f3792edd7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 916.261131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5ec3bb316ba41ed9771a46f3792edd7 [ 916.275320] env[61273]: DEBUG nova.network.neutron [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] [instance: a70f220a-fa34-44af-939f-29292b556897] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.275865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] Expecting reply to msg a90ba886cc6b430694111122c2bda527 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 916.276898] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47e2caab46464e31926c316e65394d9d [ 916.281852] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a90ba886cc6b430694111122c2bda527 [ 916.720936] env[61273]: DEBUG nova.compute.utils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 916.721620] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg c71c9ae54a87476b9d5c93dd3997ca6a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 916.725208] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 916.725464] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 916.733148] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c71c9ae54a87476b9d5c93dd3997ca6a [ 916.766294] env[61273]: DEBUG nova.policy [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5345fbbca90446719473829e2ea02386', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a36723dc732b444e8831b049e9f804b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 916.781101] env[61273]: DEBUG oslo_concurrency.lockutils [req-71190aaa-b3c8-4866-b4e6-96ec8b2563cf req-d89d9ced-d1ef-4694-bcf0-4dc927194b71 service nova] Releasing lock "refresh_cache-a70f220a-fa34-44af-939f-29292b556897" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.781988] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-a70f220a-fa34-44af-939f-29292b556897" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.781988] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 916.782735] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg eca252d1e68c411e918fa7abcdc888a5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 916.789800] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eca252d1e68c411e918fa7abcdc888a5 [ 916.842486] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5f7d41-bc8b-49e3-8fa2-04562d6a1bc6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.851624] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee4c653-f575-4211-a324-88041238be16 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.882713] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d55481a-6f03-41df-8a1b-87708772f84d {{(pid=61273) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.890707] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9ca855-bed7-4cf4-84ef-5e8425b08544 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.902846] env[61273]: DEBUG nova.compute.provider_tree [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.903308] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 47564a67ba764d029094014a92e4d0d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 916.910385] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47564a67ba764d029094014a92e4d0d2 [ 917.076085] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Successfully created port: 1d5b23b5-829f-42fc-bf5c-915565c6b728 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.231735] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 917.231735] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 4311f5b331f34125a5b6c80b6fef9e13 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 917.272187] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4311f5b331f34125a5b6c80b6fef9e13 [ 917.301592] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 917.369168] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.369168] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 91bd75a543df4f43bbb899953da2b2ae in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 917.378530] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91bd75a543df4f43bbb899953da2b2ae [ 917.411481] env[61273]: DEBUG nova.scheduler.client.report [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.413982] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg fb92fb2383194bf58fe42af4b8f950de in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 917.425752] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb92fb2383194bf58fe42af4b8f950de [ 917.705694] env[61273]: DEBUG nova.compute.manager [req-bb106a0e-8ba7-434f-bab1-9216baf88ca2 req-571a58e4-731e-4a30-8c1e-cb50d8561b1b service nova] [instance: a70f220a-fa34-44af-939f-29292b556897] Received event network-vif-deleted-25c7c234-2cad-4819-a66e-0c2b3bc4be20 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 917.736145] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 42ab20b228c04d24b80d3e15365c7b0e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 917.770823] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42ab20b228c04d24b80d3e15365c7b0e [ 917.874244] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-a70f220a-fa34-44af-939f-29292b556897" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.874673] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] 
Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 917.874868] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 917.875183] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e453bd0d-0cde-49c1-8b65-58990adddcb1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.884585] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b197e035-a9fa-4343-bffb-51bf52a74496 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.910705] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a70f220a-fa34-44af-939f-29292b556897 could not be found. [ 917.910964] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 917.911222] env[61273]: INFO nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Took 0.04 seconds to destroy the instance on the hypervisor. [ 917.911482] env[61273]: DEBUG oslo.service.loopingcall [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.911730] env[61273]: DEBUG nova.compute.manager [-] [instance: a70f220a-fa34-44af-939f-29292b556897] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 917.911832] env[61273]: DEBUG nova.network.neutron [-] [instance: a70f220a-fa34-44af-939f-29292b556897] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 917.916427] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.700s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.917487] env[61273]: ERROR nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1c1b5e97-af36-4179-8875-68e15da56408, please check neutron logs for more information. [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Traceback (most recent call last): [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self.driver.spawn(context, instance, image_meta, [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] vm_ref = self.build_virtual_machine(instance, [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] vif_infos = vmwarevif.get_vif_info(self._session, [ 917.917487] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] for vif in network_info: [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] return self._sync_wrapper(fn, *args, **kwargs) [ 917.918122] env[61273]: ERROR 
nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self.wait() [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self[:] = self._gt.wait() [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] return self._exit_event.wait() [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] result = hub.switch() [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 917.918122] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] return self.greenlet.switch() [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] result = function(*args, **kwargs) [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] return func(*args, **kwargs) [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] raise e [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] nwinfo = self.network_api.allocate_for_instance( [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] created_port_ids = self._update_ports_for_instance( [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 917.918856] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] with excutils.save_and_reraise_exception(): [ 917.918856] env[61273]: ERROR nova.compute.manager 
[instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] self.force_reraise() [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] raise self.value [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] updated_port = self._update_port( [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] _ensure_no_port_binding_failure(port) [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] raise exception.PortBindingFailed(port_id=port['id']) [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] nova.exception.PortBindingFailed: Binding failed for port 1c1b5e97-af36-4179-8875-68e15da56408, please check neutron logs for more information. [ 917.920316] env[61273]: ERROR nova.compute.manager [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] [ 917.920675] env[61273]: DEBUG nova.compute.utils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Binding failed for port 1c1b5e97-af36-4179-8875-68e15da56408, please check neutron logs for more information. 
{{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 917.920675] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.085s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.920830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 6cbf8f503a38470daf918c199be9d163 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 917.922064] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Build of instance f9e23014-2fe0-4aab-b03c-8759dc1e5eb0 was re-scheduled: Binding failed for port 1c1b5e97-af36-4179-8875-68e15da56408, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 917.922529] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 917.922813] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "refresh_cache-f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.922966] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquired lock "refresh_cache-f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.923121] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 917.923488] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg d9355d6827a1479eb8f39a83314233d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 917.926610] env[61273]: DEBUG nova.network.neutron [-] [instance: a70f220a-fa34-44af-939f-29292b556897] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 917.927065] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 01570814f41e480a8ca2c91f3e6f217a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 917.933649] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9355d6827a1479eb8f39a83314233d3 [ 917.936614] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01570814f41e480a8ca2c91f3e6f217a [ 917.952415] env[61273]: ERROR nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1d5b23b5-829f-42fc-bf5c-915565c6b728, please check neutron logs for more information. [ 917.952415] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 917.952415] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 917.952415] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 917.952415] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 917.952415] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 917.952415] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 917.952415] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 917.952415] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.952415] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 917.952415] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.952415] env[61273]: ERROR nova.compute.manager raise self.value [ 917.952415] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 917.952415] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 917.952415] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.952415] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 917.952991] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 917.952991] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 917.952991] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1d5b23b5-829f-42fc-bf5c-915565c6b728, please check neutron logs for more information. 
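Every one of these dumps, including the bare duplicate that follows, passes through oslo_utils.excutils.save_and_reraise_exception(), which is why the excutils.py __exit__ / force_reraise() / raise self.value frames sit between the Nova frames. A small standalone sketch of how that context manager is normally used, assuming oslo.utils is installed; the helper names here are made up for the example.

from oslo_utils import excutils


def risky_port_update(port_id):
    # stand-in for the Neutron call that blows up
    raise RuntimeError(f"binding failed for {port_id}")


def update_with_rollback(port_id):
    try:
        risky_port_update(port_id)
    except Exception:
        # The body runs cleanup while the exception is parked; on exit the
        # context manager re-raises the original error via force_reraise(),
        # producing exactly the frames seen at excutils.py:227 and :200.
        with excutils.save_and_reraise_exception():
            print(f"rolling back partial work for {port_id}")


try:
    update_with_rollback("1d5b23b5-829f-42fc-bf5c-915565c6b728")
except RuntimeError as exc:
    print("original exception preserved:", exc)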
[ 917.952991] env[61273]: ERROR nova.compute.manager [ 917.952991] env[61273]: Traceback (most recent call last): [ 917.952991] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 917.952991] env[61273]: listener.cb(fileno) [ 917.952991] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 917.952991] env[61273]: result = function(*args, **kwargs) [ 917.952991] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 917.952991] env[61273]: return func(*args, **kwargs) [ 917.952991] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 917.952991] env[61273]: raise e [ 917.952991] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 917.952991] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 917.952991] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 917.952991] env[61273]: created_port_ids = self._update_ports_for_instance( [ 917.952991] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 917.952991] env[61273]: with excutils.save_and_reraise_exception(): [ 917.952991] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 917.952991] env[61273]: self.force_reraise() [ 917.952991] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 917.952991] env[61273]: raise self.value [ 917.952991] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 917.952991] env[61273]: updated_port = self._update_port( [ 917.952991] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 917.952991] env[61273]: _ensure_no_port_binding_failure(port) [ 917.952991] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 917.952991] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 917.953956] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 1d5b23b5-829f-42fc-bf5c-915565c6b728, please check neutron logs for more information. [ 917.953956] env[61273]: Removing descriptor: 15 [ 917.971160] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cbf8f503a38470daf918c199be9d163 [ 918.240830] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 918.287834] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 918.288110] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 918.288270] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.288449] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 918.288589] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.288735] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 918.288936] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 918.289093] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 918.289250] env[61273]: DEBUG nova.virt.hardware [None 
req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 918.289404] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 918.289572] env[61273]: DEBUG nova.virt.hardware [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 918.290424] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cee36d5-ff94-46d1-9dd5-d41573ce468e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.298217] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc3a80f-35ac-4b4a-bc13-05271610a4be {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.312183] env[61273]: ERROR nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1d5b23b5-829f-42fc-bf5c-915565c6b728, please check neutron logs for more information. 
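Note that the same PortBindingFailed is reported twice per instance: once from the background _allocate_network_async greenthread, and again here when the spawn path first iterates network_info and the async wrapper's wait() re-raises the stored exception (the __iter__ / _sync_wrapper / wait frames in the traceback below). A stripped-down sketch of that deferred-failure pattern, assuming eventlet is installed; the wrapper class here is illustrative, not Nova's.

import eventlet


class PortBindingFailed(Exception):
    pass


def allocate_network():
    # stands in for _allocate_network_async failing in the background
    raise PortBindingFailed(
        "Binding failed for port 1d5b23b5-829f-42fc-bf5c-915565c6b728")


class AsyncNetworkInfo:
    def __init__(self):
        # kick off the allocation in a greenthread; nothing surfaces yet
        self._gt = eventlet.spawn(allocate_network)

    def __iter__(self):
        # GreenThread.wait() re-raises whatever the greenthread raised,
        # so the failure only appears when the result is first consumed.
        return iter(self._gt.wait() or [])


nw_info = AsyncNetworkInfo()          # background allocation starts
try:
    for vif in nw_info:               # first use, during spawn: error surfaces
        print(vif)
except PortBindingFailed as exc:
    print("surfaced while spawning:", exc)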
[ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Traceback (most recent call last): [ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] yield resources [ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self.driver.spawn(context, instance, image_meta, [ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self._vmops.spawn(context, instance, image_meta, injected_files, [ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] vm_ref = self.build_virtual_machine(instance, [ 918.312183] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] vif_infos = vmwarevif.get_vif_info(self._session, [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] for vif in network_info: [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] return self._sync_wrapper(fn, *args, **kwargs) [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self.wait() [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self[:] = self._gt.wait() [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] return self._exit_event.wait() [ 918.312608] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 918.312608] env[61273]: ERROR 
nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] current.throw(*self._exc) [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] result = function(*args, **kwargs) [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] return func(*args, **kwargs) [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] raise e [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] nwinfo = self.network_api.allocate_for_instance( [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] created_port_ids = self._update_ports_for_instance( [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] with excutils.save_and_reraise_exception(): [ 918.313031] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self.force_reraise() [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] raise self.value [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] updated_port = self._update_port( [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] _ensure_no_port_binding_failure(port) [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] raise exception.PortBindingFailed(port_id=port['id']) [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] nova.exception.PortBindingFailed: Binding failed for port 1d5b23b5-829f-42fc-bf5c-915565c6b728, please check neutron logs for more information. [ 918.313452] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] [ 918.313452] env[61273]: INFO nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Terminating instance [ 918.314532] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "refresh_cache-c9214700-faf8-4a26-8084-ffe4a2c06480" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.314679] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquired lock "refresh_cache-c9214700-faf8-4a26-8084-ffe4a2c06480" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.314852] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 918.315320] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 0cc1a3bebe5c411a94bc823fc2330d7d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 918.322044] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cc1a3bebe5c411a94bc823fc2330d7d [ 918.432567] env[61273]: DEBUG nova.network.neutron [-] [instance: a70f220a-fa34-44af-939f-29292b556897] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.432567] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2cf9134eb01b4477be37f91dcb917b39 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 918.445408] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2cf9134eb01b4477be37f91dcb917b39 [ 918.452409] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 918.541037] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.541548] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 6e32b11a0306428e80f6f3c093156c81 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 918.550927] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e32b11a0306428e80f6f3c093156c81 [ 918.567975] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744950ef-b6e6-41e4-8781-260fe35d10e0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.575890] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106fbdba-e0ef-460a-9b28-c2a3964c8922 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.606968] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c40088-fabf-4a81-a038-42c00bbbfbb5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.614016] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4fbacf-f18f-4a50-aea8-cca30c4bc968 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.627315] env[61273]: DEBUG nova.compute.provider_tree [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.627794] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg bd32306a0b754e7583e41139e0d71b02 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 918.634853] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd32306a0b754e7583e41139e0d71b02 [ 918.833930] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 918.908240] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.908798] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 2b0a690647904cfd9375562f72a37e38 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 918.918123] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b0a690647904cfd9375562f72a37e38 [ 918.933836] env[61273]: INFO nova.compute.manager [-] [instance: a70f220a-fa34-44af-939f-29292b556897] Took 1.02 seconds to deallocate network for instance. [ 918.936286] env[61273]: DEBUG nova.compute.claims [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 918.936491] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.045268] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Releasing lock "refresh_cache-f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.045511] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 919.045688] env[61273]: DEBUG nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 919.045848] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 919.067698] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 919.068294] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 77561ce2261e478380ee52acdbc4b240 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 919.074938] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77561ce2261e478380ee52acdbc4b240 [ 919.129946] env[61273]: DEBUG nova.scheduler.client.report [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.132453] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg a8ac712294914279a24b721ceb121e56 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 919.147639] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8ac712294914279a24b721ceb121e56 [ 919.411984] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Releasing lock "refresh_cache-c9214700-faf8-4a26-8084-ffe4a2c06480" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.412451] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 919.412639] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 919.413907] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89cd4bcd-5535-4254-b783-504cb32a9c06 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.422421] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f019b9a-29ab-4da6-b8fd-5b687a38b7bf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.443153] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c9214700-faf8-4a26-8084-ffe4a2c06480 could not be found. [ 919.443390] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 919.443551] env[61273]: INFO nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Took 0.03 seconds to destroy the instance on the hypervisor. [ 919.443788] env[61273]: DEBUG oslo.service.loopingcall [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.443995] env[61273]: DEBUG nova.compute.manager [-] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 919.444098] env[61273]: DEBUG nova.network.neutron [-] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 919.458726] env[61273]: DEBUG nova.network.neutron [-] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 919.459203] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 76f557e86b0244268ce4e3161479d9b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 919.465650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76f557e86b0244268ce4e3161479d9b4 [ 919.570294] env[61273]: DEBUG nova.network.neutron [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.570781] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg baacd1ca11d84b65af6a9d974581bbff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 919.579521] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baacd1ca11d84b65af6a9d974581bbff [ 919.635082] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.716s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.635715] env[61273]: ERROR nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 39750689-9293-4d1a-b24b-04470262e9e6, please check neutron logs for more information. 
[ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Traceback (most recent call last): [ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self.driver.spawn(context, instance, image_meta, [ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self._vmops.spawn(context, instance, image_meta, injected_files, [ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] vm_ref = self.build_virtual_machine(instance, [ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] vif_infos = vmwarevif.get_vif_info(self._session, [ 919.635715] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] for vif in network_info: [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] return self._sync_wrapper(fn, *args, **kwargs) [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self.wait() [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self[:] = self._gt.wait() [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] return self._exit_event.wait() [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] current.throw(*self._exc) [ 919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
919.636113] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] result = function(*args, **kwargs) [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] return func(*args, **kwargs) [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] raise e [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] nwinfo = self.network_api.allocate_for_instance( [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] created_port_ids = self._update_ports_for_instance( [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] with excutils.save_and_reraise_exception(): [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] self.force_reraise() [ 919.636512] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 919.636899] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] raise self.value [ 919.636899] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 919.636899] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] updated_port = self._update_port( [ 919.636899] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 919.636899] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] _ensure_no_port_binding_failure(port) [ 919.636899] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 919.636899] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] raise exception.PortBindingFailed(port_id=port['id']) [ 919.636899] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] nova.exception.PortBindingFailed: Binding failed for 
port 39750689-9293-4d1a-b24b-04470262e9e6, please check neutron logs for more information. [ 919.636899] env[61273]: ERROR nova.compute.manager [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] [ 919.636899] env[61273]: DEBUG nova.compute.utils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Binding failed for port 39750689-9293-4d1a-b24b-04470262e9e6, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 919.637613] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.761s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.639959] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 8135c86b34944d039809bd823519bcd8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 919.641108] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Build of instance d4a2025d-c128-45a2-b74c-a7fd2630d615 was re-scheduled: Binding failed for port 39750689-9293-4d1a-b24b-04470262e9e6, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 919.641493] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 919.641750] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "refresh_cache-d4a2025d-c128-45a2-b74c-a7fd2630d615" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.641904] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquired lock "refresh_cache-d4a2025d-c128-45a2-b74c-a7fd2630d615" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.642060] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 919.642409] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg ad37f08e159a4bee859027435781b5b7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 919.649346] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad37f08e159a4bee859027435781b5b7 [ 919.672937] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 919.676605] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8135c86b34944d039809bd823519bcd8 [ 919.736134] env[61273]: DEBUG nova.compute.manager [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Received event network-changed-1d5b23b5-829f-42fc-bf5c-915565c6b728 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 919.736442] env[61273]: DEBUG nova.compute.manager [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Refreshing instance network info cache due to event network-changed-1d5b23b5-829f-42fc-bf5c-915565c6b728. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 919.736712] env[61273]: DEBUG oslo_concurrency.lockutils [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] Acquiring lock "refresh_cache-c9214700-faf8-4a26-8084-ffe4a2c06480" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.736908] env[61273]: DEBUG oslo_concurrency.lockutils [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] Acquired lock "refresh_cache-c9214700-faf8-4a26-8084-ffe4a2c06480" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.737482] env[61273]: DEBUG nova.network.neutron [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Refreshing network info cache for port 1d5b23b5-829f-42fc-bf5c-915565c6b728 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 919.738037] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] Expecting reply to msg aeba2b20416244bbaa249275c9033a60 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 919.745027] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aeba2b20416244bbaa249275c9033a60 [ 919.757603] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.758277] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 4ac6709bb86a4225b6a205c0dd314211 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 919.766239] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ac6709bb86a4225b6a205c0dd314211 [ 919.961387] env[61273]: DEBUG nova.network.neutron [-] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.962278] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 268607cc5a3f4eef8190e3cf304c7f6e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 919.971382] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 268607cc5a3f4eef8190e3cf304c7f6e [ 920.079894] env[61273]: INFO nova.compute.manager [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: f9e23014-2fe0-4aab-b03c-8759dc1e5eb0] Took 1.03 seconds to deallocate network for instance. 
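The PortBindingFailed tracebacks above all pass through the same check in nova/network/neutron.py (_ensure_no_port_binding_failure, line 294 in these frames): once Neutron returns the updated port, Nova inspects its binding:vif_type and aborts the build when the backend reports a failed binding. The following is a minimal standalone sketch of that check, not Nova's exact source; the local PortBindingFailed class and VIF_TYPE_BINDING_FAILED constant stand in for nova.exception.PortBindingFailed and nova.network.model.VIF_TYPE_BINDING_FAILED.

# Minimal sketch (hypothetical stand-ins, not Nova's exact code) of the
# port-binding check the tracebacks above go through.

class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed; message text mirrors
    # the errors logged above.
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # stand-in constant

def _ensure_no_port_binding_failure(port):
    # After Neutron updates the port, a vif_type of 'binding_failed'
    # means the mechanism driver could not bind it on this host.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Example using the port from the first traceback above:
port = {'id': '1d5b23b5-829f-42fc-bf5c-915565c6b728',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 1d5b23b5-..., please check neutron logs ...

When this exception propagates out of _allocate_network_async, the compute manager aborts the claim, deallocates the (empty) network info seen in the "Updating instance_info_cache with network_info: []" entries, and re-schedules the build, which is the sequence the surrounding log entries record.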
[ 920.081913] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 201f889b93ea49cc86ab1029c0e55871 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 920.131261] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 201f889b93ea49cc86ab1029c0e55871 [ 920.260365] env[61273]: DEBUG nova.network.neutron [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 920.262232] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Releasing lock "refresh_cache-d4a2025d-c128-45a2-b74c-a7fd2630d615" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.262519] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 920.262660] env[61273]: DEBUG nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 920.262854] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 920.276699] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518ed5fc-32d0-479e-bc95-532186591e85 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.284433] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3dc67f7-9a48-4cbe-94aa-2f9fec64bc13 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.315333] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 920.315911] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 0ffca89391b948888aff6190d2147153 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 920.317236] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56612992-a960-4674-8967-0132872c0f95 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.324140] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ffca89391b948888aff6190d2147153 [ 920.325455] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a4fe18-b1aa-4eaa-b88a-05259601b995 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.341244] env[61273]: DEBUG nova.compute.provider_tree [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.341741] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 5a3d1cc8c76c47219010e534a9647dbb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 920.349850] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a3d1cc8c76c47219010e534a9647dbb [ 920.357176] env[61273]: DEBUG nova.network.neutron [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.357702] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] Expecting reply to msg 09bba6aee50a461f95f022766145abf7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 920.366253] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09bba6aee50a461f95f022766145abf7 [ 920.465255] env[61273]: INFO nova.compute.manager [-] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Took 1.02 seconds to deallocate network for instance. 
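The "Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb" entries report the inventory the resource tracker syncs to Placement. A minimal sketch of what those figures work out to, assuming the standard Placement capacity formula (total - reserved) * allocation_ratio; the dict below copies the values from the log entries.

# Sketch: effective schedulable capacity implied by the reported inventory,
# assuming capacity = (total - reserved) * allocation_ratio per resource class.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")
# VCPU: 192
# MEMORY_MB: 196078
# DISK_GB: 400

So the claims and aborts logged here are accounted against roughly 192 VCPUs, ~191 GiB of RAM, and 400 GiB of disk on this provider, with max_unit capping any single instance at 16 VCPUs, 65530 MB, and 141 GB.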
[ 920.467768] env[61273]: DEBUG nova.compute.claims [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 920.467953] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.588543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 8c7818c3d0c44c0793ee45f1d8edbeb2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 920.623043] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c7818c3d0c44c0793ee45f1d8edbeb2 [ 920.820461] env[61273]: DEBUG nova.network.neutron [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.821017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 0517edbb3dbb40a7823fe63c099ec66a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 920.832136] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0517edbb3dbb40a7823fe63c099ec66a [ 920.844097] env[61273]: DEBUG nova.scheduler.client.report [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 920.846588] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 7cbd6d4df53c4ad9883a57b31b65a4b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 920.860060] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cbd6d4df53c4ad9883a57b31b65a4b4 [ 920.861751] env[61273]: DEBUG oslo_concurrency.lockutils [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] Releasing lock "refresh_cache-c9214700-faf8-4a26-8084-ffe4a2c06480" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
920.862214] env[61273]: DEBUG nova.compute.manager [req-4534d7c3-697f-4ebe-976b-a8ed95989d3a req-75328f22-1073-4544-98a3-2de7fc41bf38 service nova] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Received event network-vif-deleted-1d5b23b5-829f-42fc-bf5c-915565c6b728 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 921.116761] env[61273]: INFO nova.scheduler.client.report [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Deleted allocations for instance f9e23014-2fe0-4aab-b03c-8759dc1e5eb0 [ 921.123405] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 4fcbde9d6b504876a13c93b3aef0f8c6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 921.141340] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fcbde9d6b504876a13c93b3aef0f8c6 [ 921.323083] env[61273]: INFO nova.compute.manager [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: d4a2025d-c128-45a2-b74c-a7fd2630d615] Took 1.06 seconds to deallocate network for instance. [ 921.324980] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 136b0c9b7f974331839e4df7173bf349 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 921.349737] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.712s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.350383] env[61273]: ERROR nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9ea1dfca-8560-4521-8e33-50b5c0f74903, please check neutron logs for more information. 
[ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Traceback (most recent call last): [ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self.driver.spawn(context, instance, image_meta, [ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] vm_ref = self.build_virtual_machine(instance, [ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] vif_infos = vmwarevif.get_vif_info(self._session, [ 921.350383] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] for vif in network_info: [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] return self._sync_wrapper(fn, *args, **kwargs) [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self.wait() [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self[:] = self._gt.wait() [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] return self._exit_event.wait() [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] current.throw(*self._exc) [ 921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
921.350739] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] result = function(*args, **kwargs) [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] return func(*args, **kwargs) [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] raise e [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] nwinfo = self.network_api.allocate_for_instance( [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] created_port_ids = self._update_ports_for_instance( [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] with excutils.save_and_reraise_exception(): [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] self.force_reraise() [ 921.351099] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 921.351453] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] raise self.value [ 921.351453] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 921.351453] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] updated_port = self._update_port( [ 921.351453] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 921.351453] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] _ensure_no_port_binding_failure(port) [ 921.351453] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 921.351453] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] raise exception.PortBindingFailed(port_id=port['id']) [ 921.351453] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] nova.exception.PortBindingFailed: Binding failed for 
port 9ea1dfca-8560-4521-8e33-50b5c0f74903, please check neutron logs for more information. [ 921.351453] env[61273]: ERROR nova.compute.manager [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] [ 921.351453] env[61273]: DEBUG nova.compute.utils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Binding failed for port 9ea1dfca-8560-4521-8e33-50b5c0f74903, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 921.352861] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.874s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.354248] env[61273]: INFO nova.compute.claims [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.357818] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg e7743e88e9a24881b98820eedcefb77a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 921.364257] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Build of instance 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5 was re-scheduled: Binding failed for port 9ea1dfca-8560-4521-8e33-50b5c0f74903, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 921.364257] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 921.364257] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquiring lock "refresh_cache-1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.364257] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Acquired lock "refresh_cache-1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.364257] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 921.364474] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg e53d7e80238941a596d76c22db37ec16 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 921.372082] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 136b0c9b7f974331839e4df7173bf349 [ 921.373508] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e53d7e80238941a596d76c22db37ec16 [ 921.402841] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7743e88e9a24881b98820eedcefb77a [ 921.626234] env[61273]: DEBUG oslo_concurrency.lockutils [None req-d5021fb1-9d31-48d9-94cb-9fc34639326e tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "f9e23014-2fe0-4aab-b03c-8759dc1e5eb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.627s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.829563] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 22f60f6131bf4ae681ef58f55d5eadb5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 921.863909] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 7e6fa395f1fe43faa26313819d1af7b3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 921.874573] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e6fa395f1fe43faa26313819d1af7b3 [ 921.882365] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg 22f60f6131bf4ae681ef58f55d5eadb5 [ 921.887414] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 921.975385] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.975910] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg ad286965128e457083ea37df805676dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 921.984271] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad286965128e457083ea37df805676dc [ 922.358769] env[61273]: INFO nova.scheduler.client.report [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Deleted allocations for instance d4a2025d-c128-45a2-b74c-a7fd2630d615 [ 922.365472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 306f27047f224dbe9d61191debf2f1f2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 922.382823] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 306f27047f224dbe9d61191debf2f1f2 [ 922.477889] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Releasing lock "refresh_cache-1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.478198] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 922.478413] env[61273]: DEBUG nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 922.478612] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 922.482645] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28358169-9c3a-498c-bc3c-3fb6bd43ee1f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.491417] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce960915-a97d-48f6-868f-4f6d9c284463 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.524599] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 922.525277] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 78b002113c53467983cd56a1a09c2ad0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 922.532511] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8147602e-80bf-4b2a-8059-deac829ebbf3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.535793] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78b002113c53467983cd56a1a09c2ad0 [ 922.536422] env[61273]: DEBUG nova.network.neutron [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.536868] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg fa6f8d1af2d94296b8bd13159f070b4c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 922.543336] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f35670-3368-47c5-872a-8147aef5a168 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.548034] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa6f8d1af2d94296b8bd13159f070b4c [ 922.558005] env[61273]: DEBUG nova.compute.provider_tree [None 
req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.558478] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 63160456271d4354b2e4de39b4b89d4f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 922.564931] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63160456271d4354b2e4de39b4b89d4f [ 922.872384] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5bfe44ca-20c4-4039-8e97-e4fefc8b27b1 tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "d4a2025d-c128-45a2-b74c-a7fd2630d615" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.225s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.040099] env[61273]: INFO nova.compute.manager [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] [instance: 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5] Took 0.56 seconds to deallocate network for instance. [ 923.041830] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg a980d6c703b24cfd93c24e2a021f9475 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 923.062380] env[61273]: DEBUG nova.scheduler.client.report [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 923.064996] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg d4140136b7ac42ff8f5fae7602d530d5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 923.077319] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a980d6c703b24cfd93c24e2a021f9475 [ 923.078723] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4140136b7ac42ff8f5fae7602d530d5 [ 923.547417] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg dcd7e463c1704e1aaa5b59a42b22d8d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 923.572029] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 
tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.215s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.572029] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 923.572029] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 6c7a951bcce1491281cfcfcfb5dd4d45 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 923.572029] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.270s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.573479] env[61273]: INFO nova.compute.claims [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.575164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 0a8e1c6a9bc94e26b4214856b145c701 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 923.578023] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcd7e463c1704e1aaa5b59a42b22d8d2 [ 923.601306] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c7a951bcce1491281cfcfcfb5dd4d45 [ 923.613569] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a8e1c6a9bc94e26b4214856b145c701 [ 924.076812] env[61273]: INFO nova.scheduler.client.report [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Deleted allocations for instance 1e7c4c50-1d33-4947-80bc-ac3ca2453fd5 [ 924.089625] env[61273]: DEBUG nova.compute.utils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 924.090162] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 4cb54c75c87344e88a4a75a3e094bdef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 924.092095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 7138f841b9d44fe5a680fe6d006d2ecd in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 924.093230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Expecting reply to msg 0922ed1ae1a3450ab08abeb439e7382b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 924.093911] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 924.094089] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 924.103428] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cb54c75c87344e88a4a75a3e094bdef [ 924.112176] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7138f841b9d44fe5a680fe6d006d2ecd [ 924.113586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0922ed1ae1a3450ab08abeb439e7382b [ 924.145016] env[61273]: DEBUG nova.policy [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af34c4e3d81c4729a9dd4a8531992ff1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9325f1def284d2a9fdced4e9eeb17f0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 924.556280] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Successfully created port: 7e195214-958a-4642-8567-f18b2b01dc9e {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 924.594029] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 924.596148] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 07ff25533e154cc29dc2aa98706e68a1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 924.611091] env[61273]: DEBUG oslo_concurrency.lockutils [None req-b27a52f7-e161-421f-916e-111f7420cad0 tempest-ImagesTestJSON-1894548440 tempest-ImagesTestJSON-1894548440-project-member] Lock "1e7c4c50-1d33-4947-80bc-ac3ca2453fd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.817s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.643074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07ff25533e154cc29dc2aa98706e68a1 [ 924.728389] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12130666-a473-4cb8-a495-82158af2c2e7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.736051] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a09207e-de7a-4114-8230-8fad0fa86bca {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.769698] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1db26f2-7066-44ee-8b33-49050bbf9eb4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.779034] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa0299c-d2a7-4d5e-be53-b098e4e900c1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.793335] env[61273]: DEBUG nova.compute.provider_tree [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.793849] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg b65986c049e04dab9c412e581b35e2ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 924.804338] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b65986c049e04dab9c412e581b35e2ba [ 924.804963] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "80491222-910b-48e1-a431-3116c336a9a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.805180] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 
tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "80491222-910b-48e1-a431-3116c336a9a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.805617] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg f7ca70df100c4059b714c3250e6f751c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 924.816486] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7ca70df100c4059b714c3250e6f751c [ 925.090370] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.090610] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.091054] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg d6cd3631b38f433f9bd49d6aa79ede5b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 925.114596] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg af08c0b27a4c4cfabdfd7e3bd0f87869 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 925.115719] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6cd3631b38f433f9bd49d6aa79ede5b [ 925.159402] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af08c0b27a4c4cfabdfd7e3bd0f87869 [ 925.305118] env[61273]: DEBUG nova.scheduler.client.report [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 925.307429] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 
tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 7ac166542cbf4b6792be86af65815d78 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 925.308441] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 925.310174] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 498375b86b544a51a2055b5ef1af8a62 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 925.322415] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ac166542cbf4b6792be86af65815d78 [ 925.353204] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 498375b86b544a51a2055b5ef1af8a62 [ 925.578104] env[61273]: DEBUG nova.compute.manager [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Received event network-changed-7e195214-958a-4642-8567-f18b2b01dc9e {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 925.578508] env[61273]: DEBUG nova.compute.manager [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Refreshing instance network info cache due to event network-changed-7e195214-958a-4642-8567-f18b2b01dc9e. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 925.578508] env[61273]: DEBUG oslo_concurrency.lockutils [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] Acquiring lock "refresh_cache-77ca81dc-6322-41de-aaee-adf36d6ce79f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.578661] env[61273]: DEBUG oslo_concurrency.lockutils [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] Acquired lock "refresh_cache-77ca81dc-6322-41de-aaee-adf36d6ce79f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.578767] env[61273]: DEBUG nova.network.neutron [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Refreshing network info cache for port 7e195214-958a-4642-8567-f18b2b01dc9e {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 925.579176] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] Expecting reply to msg 7264662ee5454e68b638b9dedaad07b3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 925.588567] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7264662ee5454e68b638b9dedaad07b3 [ 925.603151] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 925.609379] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 60f832997c814f63b2387dafd26cf93e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 925.623439] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 925.660332] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 925.660561] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 925.660711] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 925.660893] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 925.661048] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 925.661209] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 925.661435] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 925.661586] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 925.661812] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 925.661998] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 925.662176] env[61273]: DEBUG nova.virt.hardware [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 925.663150] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6e4372-63e4-4e03-9ff0-a0d983a80523 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.672401] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3712847-289e-45d5-a9e5-d0a04e68f035 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.687452] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60f832997c814f63b2387dafd26cf93e [ 925.812605] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.241s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.813158] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 925.815540] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg dc8dc4d085794242ba77c6248ba0a4fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 925.816033] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.446s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.817992] env[61273]: INFO nova.compute.claims [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 925.819477] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg 2a7676a63b2245d1aaced3fc451bcd26 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 925.849632] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.868117] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc8dc4d085794242ba77c6248ba0a4fb [ 925.869575] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a7676a63b2245d1aaced3fc451bcd26 [ 925.908340] env[61273]: ERROR nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7e195214-958a-4642-8567-f18b2b01dc9e, please check neutron logs for more information. 
[ 925.908340] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 925.908340] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 925.908340] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 925.908340] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 925.908340] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 925.908340] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 925.908340] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 925.908340] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.908340] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 925.908340] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.908340] env[61273]: ERROR nova.compute.manager raise self.value [ 925.908340] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 925.908340] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 925.908340] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 925.908340] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 925.908951] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 925.908951] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 925.908951] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7e195214-958a-4642-8567-f18b2b01dc9e, please check neutron logs for more information. 
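Note: the traceback above bottoms out in nova's _ensure_no_port_binding_failure check at nova/network/neutron.py:294. A minimal sketch of what that frame implies, paraphrased from the log rather than quoted from the upstream source (the VIF_TYPE_BINDING_FAILED constant is written out here for illustration):

    # Sketch of the check referenced at nova/network/neutron.py:294 in the trace above.
    # Paraphrase for illustration only, not the verbatim upstream implementation.
    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports for a failed binding

    def _ensure_no_port_binding_failure(port):
        # Neutron returns the binding result in the port's 'binding:vif_type' field;
        # a failed binding surfaces here as PortBindingFailed, which is what the
        # compute manager logs for port 7e195214-958a-4642-8567-f18b2b01dc9e.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])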
[ 925.908951] env[61273]: ERROR nova.compute.manager [ 925.908951] env[61273]: Traceback (most recent call last): [ 925.908951] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 925.908951] env[61273]: listener.cb(fileno) [ 925.908951] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 925.908951] env[61273]: result = function(*args, **kwargs) [ 925.908951] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 925.908951] env[61273]: return func(*args, **kwargs) [ 925.908951] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 925.908951] env[61273]: raise e [ 925.908951] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 925.908951] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 925.908951] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 925.908951] env[61273]: created_port_ids = self._update_ports_for_instance( [ 925.908951] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 925.908951] env[61273]: with excutils.save_and_reraise_exception(): [ 925.908951] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.908951] env[61273]: self.force_reraise() [ 925.908951] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.908951] env[61273]: raise self.value [ 925.908951] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 925.908951] env[61273]: updated_port = self._update_port( [ 925.908951] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 925.908951] env[61273]: _ensure_no_port_binding_failure(port) [ 925.908951] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 925.908951] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 925.909902] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 7e195214-958a-4642-8567-f18b2b01dc9e, please check neutron logs for more information. [ 925.909902] env[61273]: Removing descriptor: 15 [ 925.909902] env[61273]: ERROR nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7e195214-958a-4642-8567-f18b2b01dc9e, please check neutron logs for more information. 
[ 925.909902] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Traceback (most recent call last): [ 925.909902] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 925.909902] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] yield resources [ 925.909902] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 925.909902] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self.driver.spawn(context, instance, image_meta, [ 925.909902] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 925.909902] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 925.909902] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 925.909902] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] vm_ref = self.build_virtual_machine(instance, [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] vif_infos = vmwarevif.get_vif_info(self._session, [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] for vif in network_info: [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] return self._sync_wrapper(fn, *args, **kwargs) [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self.wait() [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self[:] = self._gt.wait() [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] return self._exit_event.wait() [ 925.910338] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 925.910862] env[61273]: ERROR 
nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] result = hub.switch() [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] return self.greenlet.switch() [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] result = function(*args, **kwargs) [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] return func(*args, **kwargs) [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] raise e [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] nwinfo = self.network_api.allocate_for_instance( [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 925.910862] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] created_port_ids = self._update_ports_for_instance( [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] with excutils.save_and_reraise_exception(): [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self.force_reraise() [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] raise self.value [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] updated_port = self._update_port( [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 925.911320] 
env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] _ensure_no_port_binding_failure(port) [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 925.911320] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] raise exception.PortBindingFailed(port_id=port['id']) [ 925.911754] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] nova.exception.PortBindingFailed: Binding failed for port 7e195214-958a-4642-8567-f18b2b01dc9e, please check neutron logs for more information. [ 925.911754] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] [ 925.911754] env[61273]: INFO nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Terminating instance [ 925.911754] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-77ca81dc-6322-41de-aaee-adf36d6ce79f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.106208] env[61273]: DEBUG nova.network.neutron [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 926.128221] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.191009] env[61273]: DEBUG nova.network.neutron [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.191533] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] Expecting reply to msg 951c1106530941ff8f982680fa8b602d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 926.200962] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 951c1106530941ff8f982680fa8b602d [ 926.324523] env[61273]: DEBUG nova.compute.utils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 926.325154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 49cc8ef34b6f45b6b4dbdbacfee23b1e in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 926.327560] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg c69415cc7ad4443690d5fe867e2c18dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 926.328521] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 926.328690] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 926.335561] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49cc8ef34b6f45b6b4dbdbacfee23b1e [ 926.337074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c69415cc7ad4443690d5fe867e2c18dc [ 926.364913] env[61273]: DEBUG nova.policy [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8969ac54b88a47028e5784f6575f2d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d2fb7de0ad453dbe6891e6974f1b66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 926.664985] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Successfully created port: 6dbb762f-c02f-40ac-8377-f96ca090d475 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 926.693647] env[61273]: DEBUG oslo_concurrency.lockutils [req-378ddbe4-4530-4756-ad31-8506f0943c9d req-ca2ff31e-fc38-4bac-9a86-b510f0715549 service nova] Releasing lock "refresh_cache-77ca81dc-6322-41de-aaee-adf36d6ce79f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.694051] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-77ca81dc-6322-41de-aaee-adf36d6ce79f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.694226] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Building network info cache for instance {{(pid=61273) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 926.694659] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 63af812dbd06423a8699e2a20d1afc99 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 926.702496] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63af812dbd06423a8699e2a20d1afc99 [ 926.829259] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 926.831503] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg ce8acb6896df485a9b27e7acf0a7b5f5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 926.887284] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce8acb6896df485a9b27e7acf0a7b5f5 [ 926.936979] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60bf5b2-48dd-42a8-bf2c-8853c47f2bcf {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.944628] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018008a3-8476-4083-9414-ac95ad6d708b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.979147] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e5a577-d1ca-463a-aa4d-cbaf1a5e0083 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.986478] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af62dd09-ccc5-45c7-afd7-1bb25ce57656 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.000786] env[61273]: DEBUG nova.compute.provider_tree [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.001552] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg efbe4db9962b4438b57da302cf0ee092 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 927.016710] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efbe4db9962b4438b57da302cf0ee092 [ 927.213408] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 927.285425] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.285968] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 9dcac293d7004d7a8aa570fc359ab489 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 927.296176] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dcac293d7004d7a8aa570fc359ab489 [ 927.338411] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg f05bce437f2e42b3bc72e218d2467c0d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 927.377156] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f05bce437f2e42b3bc72e218d2467c0d [ 927.499935] env[61273]: ERROR nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6dbb762f-c02f-40ac-8377-f96ca090d475, please check neutron logs for more information. [ 927.499935] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 927.499935] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 927.499935] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 927.499935] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 927.499935] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 927.499935] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 927.499935] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 927.499935] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 927.499935] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 927.499935] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 927.499935] env[61273]: ERROR nova.compute.manager raise self.value [ 927.499935] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 927.499935] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 927.499935] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 927.499935] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 927.500474] env[61273]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 927.500474] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 927.500474] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6dbb762f-c02f-40ac-8377-f96ca090d475, please check neutron logs for more information. [ 927.500474] env[61273]: ERROR nova.compute.manager [ 927.500474] env[61273]: Traceback (most recent call last): [ 927.500474] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 927.500474] env[61273]: listener.cb(fileno) [ 927.500474] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 927.500474] env[61273]: result = function(*args, **kwargs) [ 927.500474] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 927.500474] env[61273]: return func(*args, **kwargs) [ 927.500474] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 927.500474] env[61273]: raise e [ 927.500474] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 927.500474] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 927.500474] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 927.500474] env[61273]: created_port_ids = self._update_ports_for_instance( [ 927.500474] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 927.500474] env[61273]: with excutils.save_and_reraise_exception(): [ 927.500474] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 927.500474] env[61273]: self.force_reraise() [ 927.500474] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 927.500474] env[61273]: raise self.value [ 927.500474] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 927.500474] env[61273]: updated_port = self._update_port( [ 927.500474] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 927.500474] env[61273]: _ensure_no_port_binding_failure(port) [ 927.500474] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 927.500474] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 927.501264] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 6dbb762f-c02f-40ac-8377-f96ca090d475, please check neutron logs for more information. 
[ 927.501264] env[61273]: Removing descriptor: 15 [ 927.505249] env[61273]: DEBUG nova.scheduler.client.report [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 927.507721] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg 1915168052824f27819ea8175634729d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 927.521785] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1915168052824f27819ea8175634729d [ 927.612727] env[61273]: DEBUG nova.compute.manager [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Received event network-vif-deleted-7e195214-958a-4642-8567-f18b2b01dc9e {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 927.612985] env[61273]: DEBUG nova.compute.manager [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Received event network-changed-6dbb762f-c02f-40ac-8377-f96ca090d475 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 927.613108] env[61273]: DEBUG nova.compute.manager [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Refreshing instance network info cache due to event network-changed-6dbb762f-c02f-40ac-8377-f96ca090d475. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 927.613302] env[61273]: DEBUG oslo_concurrency.lockutils [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] Acquiring lock "refresh_cache-d10134a3-6f70-4f00-b810-371e17d2a1ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.613462] env[61273]: DEBUG oslo_concurrency.lockutils [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] Acquired lock "refresh_cache-d10134a3-6f70-4f00-b810-371e17d2a1ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.613590] env[61273]: DEBUG nova.network.neutron [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Refreshing network info cache for port 6dbb762f-c02f-40ac-8377-f96ca090d475 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 927.614014] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] Expecting reply to msg 2c0bb063709542bda5c18bb883f9c703 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 927.620909] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c0bb063709542bda5c18bb883f9c703 [ 927.788637] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-77ca81dc-6322-41de-aaee-adf36d6ce79f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.789570] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 927.789570] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 927.789570] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6b139e9-ad3a-4748-99d7-4a76b175eb44 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.799048] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2d1a7b-bdfe-47b7-921b-f0fe05d3e6a4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.822567] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 77ca81dc-6322-41de-aaee-adf36d6ce79f could not be found. 
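The WARNING just above shows the destroy path tolerating an instance that never reached the hypervisor: the earlier PortBindingFailed aborted the spawn, so when shutdown runs, the SearchIndex.FindAllByUuid lookup finds nothing, vmops logs InstanceNotFound, and teardown still reports "Instance destroyed" so that network deallocation and claim cleanup can proceed. A hedged sketch of that pattern follows; the session helpers are hypothetical names used for illustration, not Nova's actual API.

import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    pass

def destroy_on_hypervisor(session, instance_uuid):
    # Treat "no VM on the backend" as already destroyed so the rest of the
    # teardown (network deallocation, aborting the resource claim) still runs.
    try:
        vm_ref = session.find_vm_by_uuid(instance_uuid)  # hypothetical helper
        session.destroy_vm(vm_ref)                       # hypothetical helper
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.info("Instance destroyed")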
[ 927.822826] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 927.823012] env[61273]: INFO nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 927.824298] env[61273]: DEBUG oslo.service.loopingcall [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.824571] env[61273]: DEBUG nova.compute.manager [-] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 927.825356] env[61273]: DEBUG nova.network.neutron [-] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 927.841857] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 927.845823] env[61273]: DEBUG nova.network.neutron [-] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 927.846349] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg def218a7b010417bbac637b0229c1318 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 927.853324] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg def218a7b010417bbac637b0229c1318 [ 927.869690] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 927.869936] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 927.870161] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.870277] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 927.870487] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.870666] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 927.870893] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 927.871089] env[61273]: DEBUG 
nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 927.871281] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 927.871460] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 927.871673] env[61273]: DEBUG nova.virt.hardware [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 927.872944] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8eb14c0-76ab-4ec1-96ad-7e2206e54f36 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.881161] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c7af01-7b81-45c5-87d8-dae0bd3f4470 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.895515] env[61273]: ERROR nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6dbb762f-c02f-40ac-8377-f96ca090d475, please check neutron logs for more information. 
[ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Traceback (most recent call last): [ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] yield resources [ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self.driver.spawn(context, instance, image_meta, [ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] vm_ref = self.build_virtual_machine(instance, [ 927.895515] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] for vif in network_info: [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] return self._sync_wrapper(fn, *args, **kwargs) [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self.wait() [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self[:] = self._gt.wait() [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] return self._exit_event.wait() [ 927.896056] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 927.896056] env[61273]: ERROR 
nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] current.throw(*self._exc) [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] result = function(*args, **kwargs) [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] return func(*args, **kwargs) [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] raise e [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] nwinfo = self.network_api.allocate_for_instance( [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] created_port_ids = self._update_ports_for_instance( [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] with excutils.save_and_reraise_exception(): [ 927.896470] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self.force_reraise() [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] raise self.value [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] updated_port = self._update_port( [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] _ensure_no_port_binding_failure(port) [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] raise exception.PortBindingFailed(port_id=port['id']) [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] nova.exception.PortBindingFailed: Binding failed for port 6dbb762f-c02f-40ac-8377-f96ca090d475, please check neutron logs for more information. [ 927.896858] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] [ 927.896858] env[61273]: INFO nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Terminating instance [ 927.897971] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-d10134a3-6f70-4f00-b810-371e17d2a1ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.010077] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.010784] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 928.012670] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg d7a56480982549abb7768b17973ce0b2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 928.013883] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.077s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.016362] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 2b870c075ef84d3bb1d5fe16f7cce121 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 928.048111] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7a56480982549abb7768b17973ce0b2 [ 928.054737] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b870c075ef84d3bb1d5fe16f7cce121 [ 928.131641] env[61273]: DEBUG nova.network.neutron [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.204726] env[61273]: DEBUG nova.network.neutron [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.205260] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] Expecting reply to msg e4f32e7030e444e78deb22488eb47e46 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 928.213594] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4f32e7030e444e78deb22488eb47e46 [ 928.348908] env[61273]: DEBUG nova.network.neutron [-] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.349042] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e83e1d302cd44b0181bf1610f162029c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 928.357568] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e83e1d302cd44b0181bf1610f162029c [ 928.518884] env[61273]: DEBUG nova.compute.utils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 928.519546] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 
tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg 6d4fde6ac5544725a57ed87785262cc4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 928.521217] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 928.521404] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 928.530944] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d4fde6ac5544725a57ed87785262cc4 [ 928.561901] env[61273]: DEBUG nova.policy [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6c0238a6ec4419f9249472a9454c22e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2ffde0eb7394d029b57814b1bc14803', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 928.621471] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b67b033-e01b-4531-ba0c-a109174d1519 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.630367] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7a5984-f765-4e7a-898f-dd37c03fa773 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.667136] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71571c4-8d37-413b-9af1-b96f778bf653 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.675614] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0f4526-256b-4491-99a8-da2e0c5ca650 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.691445] env[61273]: DEBUG nova.compute.provider_tree [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.691980] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 6bdc0651bebb4ef4ac891e05ad8eabb8 in 
queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 928.698734] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bdc0651bebb4ef4ac891e05ad8eabb8 [ 928.707462] env[61273]: DEBUG oslo_concurrency.lockutils [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] Releasing lock "refresh_cache-d10134a3-6f70-4f00-b810-371e17d2a1ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.708201] env[61273]: DEBUG nova.compute.manager [req-c3ab8b8d-96ac-484a-b499-4790e8fa02ff req-223f6155-6b97-44bd-bd5c-62bea35e0fab service nova] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Received event network-vif-deleted-6dbb762f-c02f-40ac-8377-f96ca090d475 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 928.708767] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-d10134a3-6f70-4f00-b810-371e17d2a1ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.708948] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 928.709334] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 8556ff832b5f4dc59ed26ae6223ec756 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 928.716318] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8556ff832b5f4dc59ed26ae6223ec756 [ 928.802548] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Successfully created port: becb8351-4f8e-4d86-870c-e7e66f60b598 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 928.851666] env[61273]: INFO nova.compute.manager [-] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Took 1.03 seconds to deallocate network for instance. 
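The inventory payload reported for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb (earlier at 927.505249 and again just below at 929.194606) carries the numbers placement works from. Assuming the usual capacity model of (total - reserved) * allocation_ratio, which is an editorial reading of the logged data rather than something the log states, those entries work out as follows:

# Illustrative arithmetic over the inventory dict logged for this provider.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: usable = {usable:g}")
# VCPU: usable = 192         (48 host vCPUs oversubscribed 4x)
# MEMORY_MB: usable = 196078
# DISK_GB: usable = 400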
[ 928.854049] env[61273]: DEBUG nova.compute.claims [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 928.854250] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.021667] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 929.024066] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg 3ba0df41279949878d9617adc6e1cedc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 929.055571] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ba0df41279949878d9617adc6e1cedc [ 929.194606] env[61273]: DEBUG nova.scheduler.client.report [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 929.197012] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 18c5026e97a84896b573218c44e33e84 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 929.213777] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18c5026e97a84896b573218c44e33e84 [ 929.227389] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 929.295779] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.296367] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 843effd2abcf4c4e82ea17b20a095cd8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 929.307644] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 843effd2abcf4c4e82ea17b20a095cd8 [ 929.528217] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg a4560e30ab4546319afa9e277ff64b2d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 929.560214] env[61273]: ERROR nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port becb8351-4f8e-4d86-870c-e7e66f60b598, please check neutron logs for more information. [ 929.560214] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 929.560214] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 929.560214] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 929.560214] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 929.560214] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 929.560214] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 929.560214] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 929.560214] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 929.560214] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 929.560214] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 929.560214] env[61273]: ERROR nova.compute.manager raise self.value [ 929.560214] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 929.560214] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 929.560214] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 929.560214] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 929.560785] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
929.560785] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 929.560785] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port becb8351-4f8e-4d86-870c-e7e66f60b598, please check neutron logs for more information. [ 929.560785] env[61273]: ERROR nova.compute.manager [ 929.560785] env[61273]: Traceback (most recent call last): [ 929.560785] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 929.560785] env[61273]: listener.cb(fileno) [ 929.560785] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 929.560785] env[61273]: result = function(*args, **kwargs) [ 929.560785] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 929.560785] env[61273]: return func(*args, **kwargs) [ 929.560785] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 929.560785] env[61273]: raise e [ 929.560785] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 929.560785] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 929.560785] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 929.560785] env[61273]: created_port_ids = self._update_ports_for_instance( [ 929.560785] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 929.560785] env[61273]: with excutils.save_and_reraise_exception(): [ 929.560785] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 929.560785] env[61273]: self.force_reraise() [ 929.560785] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 929.560785] env[61273]: raise self.value [ 929.560785] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 929.560785] env[61273]: updated_port = self._update_port( [ 929.560785] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 929.560785] env[61273]: _ensure_no_port_binding_failure(port) [ 929.560785] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 929.560785] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 929.561759] env[61273]: nova.exception.PortBindingFailed: Binding failed for port becb8351-4f8e-4d86-870c-e7e66f60b598, please check neutron logs for more information. [ 929.561759] env[61273]: Removing descriptor: 15 [ 929.561759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4560e30ab4546319afa9e277ff64b2d [ 929.641154] env[61273]: DEBUG nova.compute.manager [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Received event network-changed-becb8351-4f8e-4d86-870c-e7e66f60b598 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 929.641349] env[61273]: DEBUG nova.compute.manager [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Refreshing instance network info cache due to event network-changed-becb8351-4f8e-4d86-870c-e7e66f60b598. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 929.641573] env[61273]: DEBUG oslo_concurrency.lockutils [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] Acquiring lock "refresh_cache-2178d195-47f4-47ab-9140-b8f849973434" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.641649] env[61273]: DEBUG oslo_concurrency.lockutils [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] Acquired lock "refresh_cache-2178d195-47f4-47ab-9140-b8f849973434" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.642293] env[61273]: DEBUG nova.network.neutron [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Refreshing network info cache for port becb8351-4f8e-4d86-870c-e7e66f60b598 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 929.642732] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] Expecting reply to msg 7c6b409bdbea4ea18ef5a891704a71b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 929.649581] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c6b409bdbea4ea18ef5a891704a71b1 [ 929.702603] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.689s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.703259] env[61273]: ERROR nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 25c7c234-2cad-4819-a66e-0c2b3bc4be20, please check neutron logs for more information. 
[ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] Traceback (most recent call last): [ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self.driver.spawn(context, instance, image_meta, [ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self._vmops.spawn(context, instance, image_meta, injected_files, [ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] vm_ref = self.build_virtual_machine(instance, [ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] vif_infos = vmwarevif.get_vif_info(self._session, [ 929.703259] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] for vif in network_info: [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] return self._sync_wrapper(fn, *args, **kwargs) [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self.wait() [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self[:] = self._gt.wait() [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] return self._exit_event.wait() [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] current.throw(*self._exc) [ 929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
929.703644] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] result = function(*args, **kwargs) [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] return func(*args, **kwargs) [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] raise e [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] nwinfo = self.network_api.allocate_for_instance( [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] created_port_ids = self._update_ports_for_instance( [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] with excutils.save_and_reraise_exception(): [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] self.force_reraise() [ 929.704110] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 929.704538] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] raise self.value [ 929.704538] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 929.704538] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] updated_port = self._update_port( [ 929.704538] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 929.704538] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] _ensure_no_port_binding_failure(port) [ 929.704538] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 929.704538] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] raise exception.PortBindingFailed(port_id=port['id']) [ 929.704538] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] nova.exception.PortBindingFailed: Binding failed for 
port 25c7c234-2cad-4819-a66e-0c2b3bc4be20, please check neutron logs for more information. [ 929.704538] env[61273]: ERROR nova.compute.manager [instance: a70f220a-fa34-44af-939f-29292b556897] [ 929.704538] env[61273]: DEBUG nova.compute.utils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Binding failed for port 25c7c234-2cad-4819-a66e-0c2b3bc4be20, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 929.705599] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Build of instance a70f220a-fa34-44af-939f-29292b556897 was re-scheduled: Binding failed for port 25c7c234-2cad-4819-a66e-0c2b3bc4be20, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 929.705999] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 929.706221] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-a70f220a-fa34-44af-939f-29292b556897" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.706363] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-a70f220a-fa34-44af-939f-29292b556897" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.706515] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 929.706912] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 83e8ffe90538429d9d8c94b0e29797cb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 929.708025] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.240s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.710149] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 
tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 26c7f9d19f3345369754ac9caa1c5a41 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 929.714131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83e8ffe90538429d9d8c94b0e29797cb [ 929.746182] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26c7f9d19f3345369754ac9caa1c5a41 [ 929.798060] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-d10134a3-6f70-4f00-b810-371e17d2a1ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.798478] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 929.798664] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 929.798947] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6823aabf-0775-41b4-b9be-88361a23a9b9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.808126] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc521a8c-973e-4318-8887-8f53d2015c85 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.829273] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d10134a3-6f70-4f00-b810-371e17d2a1ab could not be found. [ 929.829490] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 929.829666] env[61273]: INFO nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Took 0.03 seconds to destroy the instance on the hypervisor. [ 929.829931] env[61273]: DEBUG oslo.service.loopingcall [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.830141] env[61273]: DEBUG nova.compute.manager [-] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 929.830234] env[61273]: DEBUG nova.network.neutron [-] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 929.845128] env[61273]: DEBUG nova.network.neutron [-] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 929.845612] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 17436f18f511494da29a3157c1011400 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 929.852412] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17436f18f511494da29a3157c1011400 [ 930.031355] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 930.057027] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 930.057272] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 930.057424] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.057599] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 930.057741] env[61273]: 
DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.057889] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 930.058089] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 930.058242] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 930.058405] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 930.058562] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 930.058727] env[61273]: DEBUG nova.virt.hardware [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 930.059556] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376c6f8e-bc9a-442d-acfa-a3543aadf37f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.067130] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259e18a6-af74-4487-93f1-51356f065d2a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.080325] env[61273]: ERROR nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port becb8351-4f8e-4d86-870c-e7e66f60b598, please check neutron logs for more information. 
[ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] Traceback (most recent call last): [ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] yield resources [ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] self.driver.spawn(context, instance, image_meta, [ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] self._vmops.spawn(context, instance, image_meta, injected_files, [ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] vm_ref = self.build_virtual_machine(instance, [ 930.080325] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] vif_infos = vmwarevif.get_vif_info(self._session, [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] for vif in network_info: [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] return self._sync_wrapper(fn, *args, **kwargs) [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] self.wait() [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] self[:] = self._gt.wait() [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] return self._exit_event.wait() [ 930.080722] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 930.080722] env[61273]: ERROR 
nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] current.throw(*self._exc) [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] result = function(*args, **kwargs) [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] return func(*args, **kwargs) [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] raise e [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] nwinfo = self.network_api.allocate_for_instance( [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] created_port_ids = self._update_ports_for_instance( [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] with excutils.save_and_reraise_exception(): [ 930.081090] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] self.force_reraise() [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] raise self.value [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] updated_port = self._update_port( [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] _ensure_no_port_binding_failure(port) [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] raise exception.PortBindingFailed(port_id=port['id']) [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] nova.exception.PortBindingFailed: Binding failed for port becb8351-4f8e-4d86-870c-e7e66f60b598, please check neutron logs for more information. [ 930.081479] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] [ 930.081479] env[61273]: INFO nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Terminating instance [ 930.082491] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Acquiring lock "refresh_cache-2178d195-47f4-47ab-9140-b8f849973434" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.159318] env[61273]: DEBUG nova.network.neutron [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 930.223149] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 930.234318] env[61273]: DEBUG nova.network.neutron [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.234817] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] Expecting reply to msg 1836178f1ab0435689afe3b5b8c1f103 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 930.245017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1836178f1ab0435689afe3b5b8c1f103 [ 930.289170] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.289706] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 4fb208b56c724e458b2128ba34ca50fa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 930.298353] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fb208b56c724e458b2128ba34ca50fa [ 930.306169] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf8d784-3644-459d-878d-213690c4ec81 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.314601] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba812d7-ce1b-49ee-9d1f-8b62ab111ec0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.349724] env[61273]: DEBUG nova.network.neutron [-] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.350736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9aa5772d15474dfcbc757f1a7e491228 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 930.352442] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9b01d8-5b12-46ac-9f5e-adb49f21f15f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.359491] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9aa5772d15474dfcbc757f1a7e491228 [ 930.361187] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8f317b-28a2-4915-8d44-baac004e6e20 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.376338] env[61273]: DEBUG nova.compute.provider_tree [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Inventory has not changed in 
ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.376958] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 82a515b6de1d4118afa3bcf5bb5df8d7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 930.383847] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82a515b6de1d4118afa3bcf5bb5df8d7 [ 930.737819] env[61273]: DEBUG oslo_concurrency.lockutils [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] Releasing lock "refresh_cache-2178d195-47f4-47ab-9140-b8f849973434" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.737819] env[61273]: DEBUG nova.compute.manager [req-dbb079d0-6f7b-40d8-8405-5faba36744b0 req-d53b80ce-90bf-4b2a-9133-50df7f2456fc service nova] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Received event network-vif-deleted-becb8351-4f8e-4d86-870c-e7e66f60b598 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 930.737819] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Acquired lock "refresh_cache-2178d195-47f4-47ab-9140-b8f849973434" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.737819] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 930.738142] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg e0872d77544d4433a5e66c947ae3888f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 930.747140] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0872d77544d4433a5e66c947ae3888f [ 930.791577] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-a70f220a-fa34-44af-939f-29292b556897" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.791943] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 930.792194] env[61273]: DEBUG nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 930.792412] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 930.807875] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 930.809114] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 33ce42cd976547049f9ebc044bfcfdad in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 930.815102] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33ce42cd976547049f9ebc044bfcfdad [ 930.856069] env[61273]: INFO nova.compute.manager [-] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Took 1.03 seconds to deallocate network for instance. 
[ 930.858357] env[61273]: DEBUG nova.compute.claims [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 930.858528] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.878847] env[61273]: DEBUG nova.scheduler.client.report [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.881162] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg d0aa7623a7644baf81d3af3cbd2543db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 930.893516] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0aa7623a7644baf81d3af3cbd2543db [ 931.254835] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 931.310227] env[61273]: DEBUG nova.network.neutron [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.310728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 11023c865ea94d94a846b30b6e65eaeb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 931.322400] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11023c865ea94d94a846b30b6e65eaeb [ 931.329533] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.329983] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg 2e3e488770bd4809a33d98a4563064a5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 931.340154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e3e488770bd4809a33d98a4563064a5 [ 931.384089] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.676s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.384746] env[61273]: ERROR nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1d5b23b5-829f-42fc-bf5c-915565c6b728, please check neutron logs for more information. 
[ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Traceback (most recent call last): [ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self.driver.spawn(context, instance, image_meta, [ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self._vmops.spawn(context, instance, image_meta, injected_files, [ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] vm_ref = self.build_virtual_machine(instance, [ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] vif_infos = vmwarevif.get_vif_info(self._session, [ 931.384746] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] for vif in network_info: [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] return self._sync_wrapper(fn, *args, **kwargs) [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self.wait() [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self[:] = self._gt.wait() [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] return self._exit_event.wait() [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] current.throw(*self._exc) [ 931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
931.385298] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] result = function(*args, **kwargs) [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] return func(*args, **kwargs) [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] raise e [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] nwinfo = self.network_api.allocate_for_instance( [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] created_port_ids = self._update_ports_for_instance( [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] with excutils.save_and_reraise_exception(): [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] self.force_reraise() [ 931.385856] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 931.386421] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] raise self.value [ 931.386421] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 931.386421] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] updated_port = self._update_port( [ 931.386421] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 931.386421] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] _ensure_no_port_binding_failure(port) [ 931.386421] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 931.386421] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] raise exception.PortBindingFailed(port_id=port['id']) [ 931.386421] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] nova.exception.PortBindingFailed: Binding failed for 
port 1d5b23b5-829f-42fc-bf5c-915565c6b728, please check neutron logs for more information. [ 931.386421] env[61273]: ERROR nova.compute.manager [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] [ 931.386421] env[61273]: DEBUG nova.compute.utils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Binding failed for port 1d5b23b5-829f-42fc-bf5c-915565c6b728, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 931.386845] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.537s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.388119] env[61273]: INFO nova.compute.claims [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.389772] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 48b7d8f6919649da8c5df0374ac53a90 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 931.390956] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Build of instance c9214700-faf8-4a26-8084-ffe4a2c06480 was re-scheduled: Binding failed for port 1d5b23b5-829f-42fc-bf5c-915565c6b728, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 931.391416] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 931.391720] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "refresh_cache-c9214700-faf8-4a26-8084-ffe4a2c06480" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.391914] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquired lock "refresh_cache-c9214700-faf8-4a26-8084-ffe4a2c06480" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.392096] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 931.392604] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg f2bd499886f349ff8e678a24a00327e7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 931.398514] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2bd499886f349ff8e678a24a00327e7 [ 931.427081] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48b7d8f6919649da8c5df0374ac53a90 [ 931.812804] env[61273]: INFO nova.compute.manager [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: a70f220a-fa34-44af-939f-29292b556897] Took 1.02 seconds to deallocate network for instance. [ 931.814434] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 3376b274451842ffa3bb91803ddbfad3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 931.832194] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Releasing lock "refresh_cache-2178d195-47f4-47ab-9140-b8f849973434" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.832653] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 931.832901] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 931.833189] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f693ebdf-21a8-415b-a004-e1e04db14b45 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.841734] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec12e982-f689-4415-b880-1e8ebfe69dce {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.852156] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3376b274451842ffa3bb91803ddbfad3 [ 931.863191] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2178d195-47f4-47ab-9140-b8f849973434 could not be found. [ 931.863393] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 931.863571] env[61273]: INFO nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Took 0.03 seconds to destroy the instance on the hypervisor. [ 931.863806] env[61273]: DEBUG oslo.service.loopingcall [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.864023] env[61273]: DEBUG nova.compute.manager [-] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 931.864118] env[61273]: DEBUG nova.network.neutron [-] [instance: 2178d195-47f4-47ab-9140-b8f849973434] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 931.878326] env[61273]: DEBUG nova.network.neutron [-] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 931.878782] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d700431b77b84715b71924827ac8584f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 931.885341] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d700431b77b84715b71924827ac8584f [ 931.894874] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 9b173bf8a1ce4efa94ff781d48438ad3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 931.901884] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b173bf8a1ce4efa94ff781d48438ad3 [ 931.909334] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 931.970702] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.971216] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg f033b4f55b534c39b89db33ec69d08dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 931.979274] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f033b4f55b534c39b89db33ec69d08dc [ 932.318736] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 9cbd37d6b36d48a09e17e3f7cce809bf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 932.348586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cbd37d6b36d48a09e17e3f7cce809bf [ 932.380533] env[61273]: DEBUG nova.network.neutron [-] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.380993] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 82bb9e8d6be648d69ee54f6a8d0b4772 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 932.389447] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82bb9e8d6be648d69ee54f6a8d0b4772 [ 932.473316] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Releasing lock "refresh_cache-c9214700-faf8-4a26-8084-ffe4a2c06480" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.473546] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc 
tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 932.473727] env[61273]: DEBUG nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 932.473894] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 932.488615] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 932.489131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg ffe463f6410544328061b08327af726f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 932.495883] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffe463f6410544328061b08327af726f [ 932.499029] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0b3a14-83cd-46fd-ac02-5682f683212a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.506570] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211b4b35-8d81-4dc2-84b0-27b20f7cfa08 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.536811] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbee5f63-1e37-49fc-a271-54fa079c95ff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.543591] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4a1a89-b47f-43f4-bfce-96d7afef694f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.556104] env[61273]: DEBUG nova.compute.provider_tree [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.556581] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 
56bc14bfe3734923b4cd84f8e7e5368f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 932.563356] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56bc14bfe3734923b4cd84f8e7e5368f [ 932.843297] env[61273]: INFO nova.scheduler.client.report [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Deleted allocations for instance a70f220a-fa34-44af-939f-29292b556897 [ 932.849506] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 74310a82171644f7ba73f6e52c461320 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 932.863828] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74310a82171644f7ba73f6e52c461320 [ 932.883076] env[61273]: INFO nova.compute.manager [-] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Took 1.02 seconds to deallocate network for instance. [ 932.885364] env[61273]: DEBUG nova.compute.claims [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 932.885546] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.994036] env[61273]: DEBUG nova.network.neutron [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.994580] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 3a99207fae0c449fb8f365d79b6ae131 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 933.008513] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a99207fae0c449fb8f365d79b6ae131 [ 933.058630] env[61273]: DEBUG nova.scheduler.client.report [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.061074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 3d7ce9fa2bfd40f7aea6c58c638acbdb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 933.073411] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d7ce9fa2bfd40f7aea6c58c638acbdb [ 933.351665] env[61273]: DEBUG oslo_concurrency.lockutils [None req-89f2dafa-1d36-4333-a516-37afe2ea854c tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "a70f220a-fa34-44af-939f-29292b556897" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.962s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.496635] env[61273]: INFO nova.compute.manager [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: c9214700-faf8-4a26-8084-ffe4a2c06480] Took 1.02 seconds to deallocate network for instance. [ 933.498472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 44e7af52aec04b74a4ac915092a245a3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 933.541955] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44e7af52aec04b74a4ac915092a245a3 [ 933.563277] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.177s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.563778] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 933.565586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 35595c94c8964318a5549476720ff434 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 933.567135] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.438s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.567987] env[61273]: INFO nova.compute.claims [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.569649] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 0ad0a269b5f1435a948f4b85da0290e3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 933.600230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35595c94c8964318a5549476720ff434 [ 933.606149] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ad0a269b5f1435a948f4b85da0290e3 [ 934.004315] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 0fb27b5878754d958337453582913dd0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.037921] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fb27b5878754d958337453582913dd0 [ 934.048587] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.048781] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.048923] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Starting heal instance info cache {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 934.049039] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Rebuilding the list of instances to heal {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 934.049565] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 1faf05454a814b87ac510d4f6049e4dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.062485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 1faf05454a814b87ac510d4f6049e4dc [ 934.073020] env[61273]: DEBUG nova.compute.utils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 934.073618] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 4e2dd32917b74bdd921d705bed5dbf9e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.074427] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 934.074585] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 934.077650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg d79a673d27544e1ea050ba0b588effb1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.081623] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e2dd32917b74bdd921d705bed5dbf9e [ 934.083712] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d79a673d27544e1ea050ba0b588effb1 [ 934.121833] env[61273]: DEBUG nova.policy [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77bd4b765f214cb8bb602e8e52071531', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a752424d76840dabab55a9202e7a635', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 934.334677] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "4196c9a3-53d2-4a6d-b944-813be319cbdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.334953] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "4196c9a3-53d2-4a6d-b944-813be319cbdd" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.335412] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 4277cfd2fdd247cab40ba7b700b7ac7c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.344971] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4277cfd2fdd247cab40ba7b700b7ac7c [ 934.434476] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Successfully created port: 34c92d02-9079-4f11-a58a-625d44ed4cf1 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 934.528068] env[61273]: INFO nova.scheduler.client.report [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Deleted allocations for instance c9214700-faf8-4a26-8084-ffe4a2c06480 [ 934.534159] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 564f808dcca447b7ad701cf0d0a9d042 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.544093] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 564f808dcca447b7ad701cf0d0a9d042 [ 934.552434] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 934.552584] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 934.552710] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 934.552856] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 934.552988] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 934.553104] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Didn't find any instances for network info cache update. 
{{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 934.554226] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.554416] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.554567] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.554716] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.554854] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.554994] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.555121] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61273) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 934.555259] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager.update_available_resource {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.555563] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg d4622655c0804b9d83830c4d48a45fea in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.566135] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4622655c0804b9d83830c4d48a45fea [ 934.579669] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 934.581468] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg c4781687fd5b42029862aa7cb06f940c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.626236] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4781687fd5b42029862aa7cb06f940c [ 934.668674] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1ca992-98ba-447d-8343-d6887a61a04c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.676241] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbe18da-d6f2-47d7-8a53-082512b0f68d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.704964] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f902c3f-6fa6-4bea-8602-99b1e106f52d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.712086] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e098e7c-b49e-4c55-ae39-1309f1ef86e1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.725910] env[61273]: DEBUG nova.compute.provider_tree [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.726358] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg bfa255ee642a41339626afb7f19ac82a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.733709] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfa255ee642a41339626afb7f19ac82a [ 934.838436] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 934.840211] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 426c7f15afad488a98a2161397fe0580 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 934.872042] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 426c7f15afad488a98a2161397fe0580 [ 935.035973] env[61273]: DEBUG oslo_concurrency.lockutils [None req-1b0eb2c8-b5c5-4fd1-a35a-82ced94c8ecc tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "c9214700-faf8-4a26-8084-ffe4a2c06480" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.428s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.057935] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.088521] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 2047c56ea8244357ba775ba2ace9538f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 935.103757] env[61273]: DEBUG nova.compute.manager [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Received event network-changed-34c92d02-9079-4f11-a58a-625d44ed4cf1 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 935.104008] env[61273]: DEBUG nova.compute.manager [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Refreshing instance network info cache due to event network-changed-34c92d02-9079-4f11-a58a-625d44ed4cf1. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 935.104234] env[61273]: DEBUG oslo_concurrency.lockutils [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] Acquiring lock "refresh_cache-80491222-910b-48e1-a431-3116c336a9a5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.104375] env[61273]: DEBUG oslo_concurrency.lockutils [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] Acquired lock "refresh_cache-80491222-910b-48e1-a431-3116c336a9a5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.104585] env[61273]: DEBUG nova.network.neutron [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Refreshing network info cache for port 34c92d02-9079-4f11-a58a-625d44ed4cf1 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 935.105687] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] Expecting reply to msg 7f9fd04ec6944d55b83b565757a18854 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 935.111742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f9fd04ec6944d55b83b565757a18854 [ 935.120987] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2047c56ea8244357ba775ba2ace9538f [ 935.228542] env[61273]: DEBUG nova.scheduler.client.report [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 935.231593] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg f673b086c5424cb4a45d199468896f6f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 935.246224] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f673b086c5424cb4a45d199468896f6f [ 935.249878] env[61273]: ERROR nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 34c92d02-9079-4f11-a58a-625d44ed4cf1, please check neutron logs for more information. 
[ 935.249878] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 935.249878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 935.249878] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 935.249878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 935.249878] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 935.249878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 935.249878] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 935.249878] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 935.249878] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 935.249878] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 935.249878] env[61273]: ERROR nova.compute.manager raise self.value [ 935.249878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 935.249878] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 935.249878] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 935.249878] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 935.250390] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 935.250390] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 935.250390] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 34c92d02-9079-4f11-a58a-625d44ed4cf1, please check neutron logs for more information. 
[ 935.250390] env[61273]: ERROR nova.compute.manager [ 935.250390] env[61273]: Traceback (most recent call last): [ 935.250390] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 935.250390] env[61273]: listener.cb(fileno) [ 935.250390] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 935.250390] env[61273]: result = function(*args, **kwargs) [ 935.250390] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 935.250390] env[61273]: return func(*args, **kwargs) [ 935.250390] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 935.250390] env[61273]: raise e [ 935.250390] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 935.250390] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 935.250390] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 935.250390] env[61273]: created_port_ids = self._update_ports_for_instance( [ 935.250390] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 935.250390] env[61273]: with excutils.save_and_reraise_exception(): [ 935.250390] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 935.250390] env[61273]: self.force_reraise() [ 935.250390] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 935.250390] env[61273]: raise self.value [ 935.250390] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 935.250390] env[61273]: updated_port = self._update_port( [ 935.250390] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 935.250390] env[61273]: _ensure_no_port_binding_failure(port) [ 935.250390] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 935.250390] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 935.251394] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 34c92d02-9079-4f11-a58a-625d44ed4cf1, please check neutron logs for more information. [ 935.251394] env[61273]: Removing descriptor: 19 [ 935.360285] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.592293] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 935.618648] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 935.618921] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 935.619082] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 935.619258] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 935.619403] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 935.619550] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 935.619776] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 935.619949] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 935.620988] env[61273]: DEBUG 
nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 935.620988] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 935.620988] env[61273]: DEBUG nova.virt.hardware [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 935.621390] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6bf039-9cd5-45b9-b6e6-bdc31c0aad84 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.626385] env[61273]: DEBUG nova.network.neutron [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 935.631259] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36f3b95-625c-46a8-90af-33e7ae72a4ce {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.645806] env[61273]: ERROR nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 34c92d02-9079-4f11-a58a-625d44ed4cf1, please check neutron logs for more information. 
[ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] Traceback (most recent call last): [ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] yield resources [ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self.driver.spawn(context, instance, image_meta, [ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] vm_ref = self.build_virtual_machine(instance, [ 935.645806] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] vif_infos = vmwarevif.get_vif_info(self._session, [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] for vif in network_info: [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] return self._sync_wrapper(fn, *args, **kwargs) [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self.wait() [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self[:] = self._gt.wait() [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] return self._exit_event.wait() [ 935.646537] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 935.646537] env[61273]: ERROR 
nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] current.throw(*self._exc) [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] result = function(*args, **kwargs) [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] return func(*args, **kwargs) [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] raise e [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] nwinfo = self.network_api.allocate_for_instance( [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] created_port_ids = self._update_ports_for_instance( [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] with excutils.save_and_reraise_exception(): [ 935.647141] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self.force_reraise() [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] raise self.value [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] updated_port = self._update_port( [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] _ensure_no_port_binding_failure(port) [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] raise exception.PortBindingFailed(port_id=port['id']) [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] nova.exception.PortBindingFailed: Binding failed for port 34c92d02-9079-4f11-a58a-625d44ed4cf1, please check neutron logs for more information. [ 935.648408] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] [ 935.648408] env[61273]: INFO nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Terminating instance [ 935.649241] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "refresh_cache-80491222-910b-48e1-a431-3116c336a9a5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.709788] env[61273]: DEBUG nova.network.neutron [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.710317] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] Expecting reply to msg 776ffb0ba7484ce9addc5dacf02f0f3f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 935.717885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 776ffb0ba7484ce9addc5dacf02f0f3f [ 935.733975] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.167s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.734526] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 935.736357] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 057e464a6e89488ab8c3fe4f860d6d47 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 935.737297] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.883s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.739018] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 79204b63e903408185a6e1184d61af14 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 935.767021] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 057e464a6e89488ab8c3fe4f860d6d47 [ 935.771481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79204b63e903408185a6e1184d61af14 [ 936.212706] env[61273]: DEBUG oslo_concurrency.lockutils [req-7364c899-98d9-49db-814c-96ab71b0d2f0 req-0d57f63e-b6a9-4428-8342-4e0cba2cf61a service nova] Releasing lock "refresh_cache-80491222-910b-48e1-a431-3116c336a9a5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.213084] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquired lock "refresh_cache-80491222-910b-48e1-a431-3116c336a9a5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.213297] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 936.213702] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 13082aad755d439f8a537c4d729bfd32 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 936.220842] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13082aad755d439f8a537c4d729bfd32 [ 936.242356] env[61273]: DEBUG nova.compute.utils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 936.242985] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg ea6af49b0b1d4fa6b1af63792603a510 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 
936.243749] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 936.243915] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 936.253950] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea6af49b0b1d4fa6b1af63792603a510 [ 936.281318] env[61273]: DEBUG nova.policy [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afd2b293ac5747749b0bae2b787277ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b8d7d7387e44003b6b4bc488c7900f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 936.348977] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0b1b52-ef6c-488f-a141-56f4bb4cc402 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.357841] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c8996e-e695-4cd9-b178-126aefda18d3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.387110] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ead054-3ae9-40ce-9ded-4f6f96e5a38a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.394426] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b63b86-2c23-472f-884d-80c6e29cd569 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.407673] env[61273]: DEBUG nova.compute.provider_tree [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.408171] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 7e26fd12d15743daaf7543917e36bbe4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 936.414835] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e26fd12d15743daaf7543917e36bbe4 [ 936.563741] env[61273]: DEBUG 
nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Successfully created port: cbe637dd-c959-40d2-af09-5b04853ed211 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.731220] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 936.749774] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 936.751666] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 172e46b06467407f9345e6f4d261c4dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 936.788648] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 172e46b06467407f9345e6f4d261c4dc [ 936.829832] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.830360] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 8efefb3d2dd14a64bd509fb5207d766a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 936.839762] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8efefb3d2dd14a64bd509fb5207d766a [ 936.910359] env[61273]: DEBUG nova.scheduler.client.report [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.913342] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg ede93839377d4223827f051f84e6b930 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 936.924679] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg ede93839377d4223827f051f84e6b930 [ 937.139578] env[61273]: DEBUG nova.compute.manager [req-278eabed-8468-46f0-af86-160f02b58383 req-37d968ed-25d0-467c-b983-f082be8e98e5 service nova] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Received event network-vif-deleted-34c92d02-9079-4f11-a58a-625d44ed4cf1 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 937.256361] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 976b4e22848047f785fa9aa800d9753d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 937.290927] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 976b4e22848047f785fa9aa800d9753d [ 937.332355] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Releasing lock "refresh_cache-80491222-910b-48e1-a431-3116c336a9a5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.332846] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 937.333056] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 937.333348] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19b0c8e3-aa03-49ef-9721-0a24a5d0ac39 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.345856] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe526d09-85c8-4096-ab17-aebd25815b26 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.374566] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 80491222-910b-48e1-a431-3116c336a9a5 could not be found. 
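The WARNING just above shows the vmwareapi destroy path tolerating a VM that is already gone from the backend: the SearchIndex.FindAllByUuid lookup finds nothing, the resulting InstanceNotFound is caught, and teardown continues so that network and resource cleanup still run. The following is a minimal Python sketch of that pattern only; the names (find_vm_by_uuid, destroy_instance) are hypothetical stand-ins, not the actual nova.virt.vmwareapi.vmops code.

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Raised when the backend has no VM for the given instance UUID."""


def find_vm_by_uuid(session, instance_uuid):
    # Hypothetical stand-in for the FindAllByUuid lookup seen in the log;
    # here it always reports the VM as missing.
    raise InstanceNotFound("Instance %s could not be found." % instance_uuid)


def destroy_instance(session, instance_uuid):
    # Destroy the backend VM; a VM that no longer exists is treated as
    # already destroyed instead of failing the teardown.
    try:
        vm_ref = find_vm_by_uuid(session, instance_uuid)
        # ... power off and unregister vm_ref here ...
    except InstanceNotFound as exc:
        # Mirrors the WARNING in the log: keep going so that network and
        # resource cleanup still run for the instance.
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.info("Instance destroyed")


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    destroy_instance(None, "80491222-910b-48e1-a431-3116c336a9a5")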
[ 937.374759] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 937.374940] env[61273]: INFO nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 937.375177] env[61273]: DEBUG oslo.service.loopingcall [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.375383] env[61273]: DEBUG nova.compute.manager [-] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 937.375472] env[61273]: DEBUG nova.network.neutron [-] [instance: 80491222-910b-48e1-a431-3116c336a9a5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 937.392824] env[61273]: DEBUG nova.network.neutron [-] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 937.393128] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7ce046bd815a43cb985efe021877b5cc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 937.400578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ce046bd815a43cb985efe021877b5cc [ 937.415567] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.678s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.416313] env[61273]: ERROR nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7e195214-958a-4642-8567-f18b2b01dc9e, please check neutron logs for more information. 
[ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Traceback (most recent call last): [ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self.driver.spawn(context, instance, image_meta, [ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] vm_ref = self.build_virtual_machine(instance, [ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] vif_infos = vmwarevif.get_vif_info(self._session, [ 937.416313] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] for vif in network_info: [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] return self._sync_wrapper(fn, *args, **kwargs) [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self.wait() [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self[:] = self._gt.wait() [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] return self._exit_event.wait() [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] result = hub.switch() [ 937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
937.416774] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] return self.greenlet.switch() [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] result = function(*args, **kwargs) [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] return func(*args, **kwargs) [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] raise e [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] nwinfo = self.network_api.allocate_for_instance( [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] created_port_ids = self._update_ports_for_instance( [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] with excutils.save_and_reraise_exception(): [ 937.417285] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] self.force_reraise() [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] raise self.value [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] updated_port = self._update_port( [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] _ensure_no_port_binding_failure(port) [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] raise exception.PortBindingFailed(port_id=port['id']) [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] nova.exception.PortBindingFailed: Binding failed for port 7e195214-958a-4642-8567-f18b2b01dc9e, please check neutron logs for more information. [ 937.417770] env[61273]: ERROR nova.compute.manager [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] [ 937.418178] env[61273]: DEBUG nova.compute.utils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Binding failed for port 7e195214-958a-4642-8567-f18b2b01dc9e, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 937.418178] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.560s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.420071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg e9c44b91e510458f89c921dcf9f7ea0e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 937.421421] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Build of instance 77ca81dc-6322-41de-aaee-adf36d6ce79f was re-scheduled: Binding failed for port 7e195214-958a-4642-8567-f18b2b01dc9e, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 937.421889] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 937.422141] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-77ca81dc-6322-41de-aaee-adf36d6ce79f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.422287] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-77ca81dc-6322-41de-aaee-adf36d6ce79f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.422440] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 937.422844] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg fc8debb3d0344a39aae247705f6643c9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 937.428946] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc8debb3d0344a39aae247705f6643c9 [ 937.453060] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9c44b91e510458f89c921dcf9f7ea0e [ 937.760171] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 937.769561] env[61273]: ERROR nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cbe637dd-c959-40d2-af09-5b04853ed211, please check neutron logs for more information. 
[ 937.769561] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 937.769561] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 937.769561] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 937.769561] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 937.769561] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 937.769561] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 937.769561] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 937.769561] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 937.769561] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 937.769561] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 937.769561] env[61273]: ERROR nova.compute.manager raise self.value [ 937.769561] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 937.769561] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 937.769561] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 937.769561] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 937.770148] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 937.770148] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 937.770148] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cbe637dd-c959-40d2-af09-5b04853ed211, please check neutron logs for more information. 
[ 937.770148] env[61273]: ERROR nova.compute.manager [ 937.770148] env[61273]: Traceback (most recent call last): [ 937.770148] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 937.770148] env[61273]: listener.cb(fileno) [ 937.770148] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 937.770148] env[61273]: result = function(*args, **kwargs) [ 937.770148] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 937.770148] env[61273]: return func(*args, **kwargs) [ 937.770148] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 937.770148] env[61273]: raise e [ 937.770148] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 937.770148] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 937.770148] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 937.770148] env[61273]: created_port_ids = self._update_ports_for_instance( [ 937.770148] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 937.770148] env[61273]: with excutils.save_and_reraise_exception(): [ 937.770148] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 937.770148] env[61273]: self.force_reraise() [ 937.770148] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 937.770148] env[61273]: raise self.value [ 937.770148] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 937.770148] env[61273]: updated_port = self._update_port( [ 937.770148] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 937.770148] env[61273]: _ensure_no_port_binding_failure(port) [ 937.770148] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 937.770148] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 937.771204] env[61273]: nova.exception.PortBindingFailed: Binding failed for port cbe637dd-c959-40d2-af09-5b04853ed211, please check neutron logs for more information. 
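The traceback above ends with _ensure_no_port_binding_failure raising PortBindingFailed out of _update_port, while excutils.save_and_reraise_exception() preserves the error so cleanup can run before it propagates. Below is a minimal, self-contained sketch of that failure path; it assumes Neutron marks a failed binding by setting the port's 'binding:vif_type' to 'binding_failed', and the helper names and the stand-in context manager are illustrative rather than Nova's or oslo.utils' actual implementations.

import contextlib
import logging
import sys

LOG = logging.getLogger(__name__)


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Assumption: Neutron flags a failed binding via a sentinel vif_type.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


@contextlib.contextmanager
def save_and_reraise_exception():
    # Minimal stand-in for oslo_utils.excutils.save_and_reraise_exception:
    # remember the in-flight exception, run the cleanup body, then re-raise.
    exc = sys.exc_info()[1]
    yield
    if exc is not None:
        raise exc


def update_ports_for_instance(ports):
    created = []
    for port in ports:
        try:
            ensure_no_port_binding_failure(port)
            created.append(port["id"])
        except PortBindingFailed:
            with save_and_reraise_exception():
                # Roll back anything created so far before the original
                # exception propagates, as allocate_for_instance() does.
                LOG.warning("Rolling back %d created port(s)", len(created))
    return created


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    try:
        update_ports_for_instance(
            [{"id": "cbe637dd-c959-40d2-af09-5b04853ed211",
              "binding:vif_type": "binding_failed"}])
    except PortBindingFailed as exc:
        LOG.error("%s", exc)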
[ 937.771204] env[61273]: Removing descriptor: 19 [ 937.785933] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 937.786203] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 937.786358] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 937.786534] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 937.786675] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 937.786818] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 937.787091] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 937.787268] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 937.787435] env[61273]: DEBUG nova.virt.hardware [None 
req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 937.787591] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 937.787757] env[61273]: DEBUG nova.virt.hardware [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 937.790062] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3bfc30-2e7d-4589-833c-0c25c31beb66 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.798043] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557d14a6-ed00-4ded-bdf7-81bfae0ae159 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.812309] env[61273]: ERROR nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cbe637dd-c959-40d2-af09-5b04853ed211, please check neutron logs for more information. 
[ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Traceback (most recent call last): [ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] yield resources [ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self.driver.spawn(context, instance, image_meta, [ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] vm_ref = self.build_virtual_machine(instance, [ 937.812309] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] vif_infos = vmwarevif.get_vif_info(self._session, [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] for vif in network_info: [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] return self._sync_wrapper(fn, *args, **kwargs) [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self.wait() [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self[:] = self._gt.wait() [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] return self._exit_event.wait() [ 937.812857] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 937.812857] env[61273]: ERROR 
nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] current.throw(*self._exc) [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] result = function(*args, **kwargs) [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] return func(*args, **kwargs) [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] raise e [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] nwinfo = self.network_api.allocate_for_instance( [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] created_port_ids = self._update_ports_for_instance( [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] with excutils.save_and_reraise_exception(): [ 937.813353] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self.force_reraise() [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] raise self.value [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] updated_port = self._update_port( [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] _ensure_no_port_binding_failure(port) [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] raise exception.PortBindingFailed(port_id=port['id']) [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] nova.exception.PortBindingFailed: Binding failed for port cbe637dd-c959-40d2-af09-5b04853ed211, please check neutron logs for more information. [ 937.813872] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] [ 937.813872] env[61273]: INFO nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Terminating instance [ 937.814638] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "refresh_cache-ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.814855] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquired lock "refresh_cache-ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.815036] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 937.815476] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 1f944464838744fd9d9a6b604cfcf55f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 937.825178] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f944464838744fd9d9a6b604cfcf55f [ 937.894679] env[61273]: DEBUG nova.network.neutron [-] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.895150] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fb94849d7d08458e8167163bbcc08d2f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 937.902741] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb94849d7d08458e8167163bbcc08d2f [ 937.943366] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 937.965820] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "57a60fa5-be59-48b8-b72a-6f7d945ba821" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.966053] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "57a60fa5-be59-48b8-b72a-6f7d945ba821" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.966491] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg fca9230458a74af99f71f9e2416b3638 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 937.977839] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fca9230458a74af99f71f9e2416b3638 [ 938.031470] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25510f5e-0fb9-483e-aaed-e2a5c3cd7280 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.040635] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc3702f-8bbc-41bc-b72a-7df7e9437cd5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.044267] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.044759] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 7ec42eada86b4169868536c6ab6c2e9f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 938.798907] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ec42eada86b4169868536c6ab6c2e9f [ 938.801395] env[61273]: INFO nova.compute.manager [-] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Took 1.43 seconds to deallocate network for instance. [ 938.801755] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 938.803484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 1049b9657de24d8b871c26bc653b3ed6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 938.808082] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-77ca81dc-6322-41de-aaee-adf36d6ce79f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.808082] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 938.808082] env[61273]: DEBUG nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 938.808082] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 938.810996] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5c8829-3039-49f3-8f75-d7eef151ed41 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.812349] env[61273]: DEBUG nova.compute.claims [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 938.813112] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.817415] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b1d1fb-654c-4985-a5bd-08fb3de5b5f1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.830363] env[61273]: DEBUG nova.compute.provider_tree [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.830844] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 93365eec8d05411f80ad4bac382d9a65 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 938.832164] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 938.834291] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 938.835009] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 63f4f7ea10b3453a846d86d30f286c3c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 938.837085] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93365eec8d05411f80ad4bac382d9a65 [ 938.844287] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1049b9657de24d8b871c26bc653b3ed6 [ 938.844691] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63f4f7ea10b3453a846d86d30f286c3c [ 938.908853] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.909421] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 2fea2acb1b5042189ad0558886967c68 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 938.916950] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fea2acb1b5042189ad0558886967c68 [ 939.165656] env[61273]: DEBUG nova.compute.manager [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Received event network-changed-cbe637dd-c959-40d2-af09-5b04853ed211 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 939.165870] env[61273]: DEBUG nova.compute.manager [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Refreshing instance network info cache due to event network-changed-cbe637dd-c959-40d2-af09-5b04853ed211. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 939.166082] env[61273]: DEBUG oslo_concurrency.lockutils [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] Acquiring lock "refresh_cache-ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.324312] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.337323] env[61273]: DEBUG nova.scheduler.client.report [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 939.339616] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg a941549066c8477ab6bb02d7529b71b5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 939.340491] env[61273]: DEBUG nova.network.neutron [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.340865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg a3d70f2fe75f4582a0db8e4cb77324fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 939.348730] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3d70f2fe75f4582a0db8e4cb77324fb [ 939.351752] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a941549066c8477ab6bb02d7529b71b5 [ 939.411956] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Releasing lock "refresh_cache-ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.412394] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 939.412584] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 939.412908] env[61273]: DEBUG oslo_concurrency.lockutils [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] Acquired lock "refresh_cache-ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.413078] env[61273]: DEBUG nova.network.neutron [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Refreshing network info cache for port cbe637dd-c959-40d2-af09-5b04853ed211 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 939.413501] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] Expecting reply to msg 7b094f07dabc4f25a904a546cd9ae8d4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 939.414563] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7015c42c-2737-4166-bd3f-5504317048b4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.423522] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7dcac6-8feb-433d-866d-416b267df05b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.433481] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b094f07dabc4f25a904a546cd9ae8d4 [ 939.444658] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f could not be found. [ 939.444857] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 939.445032] env[61273]: INFO nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 939.445262] env[61273]: DEBUG oslo.service.loopingcall [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.445467] env[61273]: DEBUG nova.compute.manager [-] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 939.445557] env[61273]: DEBUG nova.network.neutron [-] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 939.469627] env[61273]: DEBUG nova.network.neutron [-] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 939.470130] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bd15172999cc49739a39d23930d2c064 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 939.476430] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd15172999cc49739a39d23930d2c064 [ 939.843052] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.425s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.843617] env[61273]: ERROR nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6dbb762f-c02f-40ac-8377-f96ca090d475, please check neutron logs for more information. 
[ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Traceback (most recent call last): [ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self.driver.spawn(context, instance, image_meta, [ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] vm_ref = self.build_virtual_machine(instance, [ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 939.843617] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] for vif in network_info: [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] return self._sync_wrapper(fn, *args, **kwargs) [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self.wait() [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self[:] = self._gt.wait() [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] return self._exit_event.wait() [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] current.throw(*self._exc) [ 939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
939.844305] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] result = function(*args, **kwargs) [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] return func(*args, **kwargs) [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] raise e [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] nwinfo = self.network_api.allocate_for_instance( [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] created_port_ids = self._update_ports_for_instance( [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] with excutils.save_and_reraise_exception(): [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] self.force_reraise() [ 939.844983] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 939.845608] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] raise self.value [ 939.845608] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 939.845608] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] updated_port = self._update_port( [ 939.845608] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 939.845608] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] _ensure_no_port_binding_failure(port) [ 939.845608] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 939.845608] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] raise exception.PortBindingFailed(port_id=port['id']) [ 939.845608] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] nova.exception.PortBindingFailed: Binding failed for 
port 6dbb762f-c02f-40ac-8377-f96ca090d475, please check neutron logs for more information. [ 939.845608] env[61273]: ERROR nova.compute.manager [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] [ 939.845608] env[61273]: DEBUG nova.compute.utils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Binding failed for port 6dbb762f-c02f-40ac-8377-f96ca090d475, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 939.846038] env[61273]: INFO nova.compute.manager [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 77ca81dc-6322-41de-aaee-adf36d6ce79f] Took 1.04 seconds to deallocate network for instance. [ 939.847316] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 3ce61b19720444498516468553e1508d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 939.848386] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Build of instance d10134a3-6f70-4f00-b810-371e17d2a1ab was re-scheduled: Binding failed for port 6dbb762f-c02f-40ac-8377-f96ca090d475, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 939.848784] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 939.849022] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-d10134a3-6f70-4f00-b810-371e17d2a1ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.849167] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-d10134a3-6f70-4f00-b810-371e17d2a1ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.849319] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 939.849650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg e7087582988a43b8b7c9ed21aea76567 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 939.850324] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.965s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.851954] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg d5922bccb239405da561e9dbae6b7721 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 939.855783] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7087582988a43b8b7c9ed21aea76567 [ 939.888149] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5922bccb239405da561e9dbae6b7721 [ 939.905134] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ce61b19720444498516468553e1508d [ 939.932774] env[61273]: DEBUG nova.network.neutron [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 939.972405] env[61273]: DEBUG nova.network.neutron [-] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.973117] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d378cb4f0c5d438b9ae5a9890778b1aa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 939.980709] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d378cb4f0c5d438b9ae5a9890778b1aa [ 939.995997] env[61273]: DEBUG nova.network.neutron [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.996553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] Expecting reply to msg 5fe5ad7f036e43e4af1d10298f06eabe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 940.004450] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fe5ad7f036e43e4af1d10298f06eabe [ 940.355978] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 2eec8083139a4d59b03ccc607558268b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 940.376135] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 940.391559] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2eec8083139a4d59b03ccc607558268b [ 940.447381] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.448011] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg b8c40ddbc2324d2e9c15a1d644ef98c6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 940.452142] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14266901-0180-4cd7-88f6-a03d92c01497 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.455515] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8c40ddbc2324d2e9c15a1d644ef98c6 [ 940.459449] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156f518a-bebd-4cdd-9603-943dcd8adeca {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.489014] env[61273]: INFO nova.compute.manager [-] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Took 1.04 seconds to deallocate network for instance. [ 940.491130] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38cd4ce-e004-427c-a476-2250b1fe054f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.493674] env[61273]: DEBUG nova.compute.claims [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 940.493844] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.497916] env[61273]: DEBUG oslo_concurrency.lockutils [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] Releasing lock "refresh_cache-ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.498514] env[61273]: DEBUG nova.compute.manager [req-22f637a6-15ee-42e2-8a7a-030dadd9ed5a req-7433e178-8e8d-4a75-b23a-cf140880de13 service nova] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Received event network-vif-deleted-cbe637dd-c959-40d2-af09-5b04853ed211 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 940.499320] env[61273]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7093a123-c24d-4a55-9be7-fd215e952e13 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.513822] env[61273]: DEBUG nova.compute.provider_tree [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.514632] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg e821cb89c8084734bd52a579e4418781 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 940.521360] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e821cb89c8084734bd52a579e4418781 [ 940.878413] env[61273]: INFO nova.scheduler.client.report [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Deleted allocations for instance 77ca81dc-6322-41de-aaee-adf36d6ce79f [ 940.884676] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg d9bfbbd850944a84850d38283cc9d458 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 940.894257] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9bfbbd850944a84850d38283cc9d458 [ 940.950475] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-d10134a3-6f70-4f00-b810-371e17d2a1ab" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.950704] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 940.950883] env[61273]: DEBUG nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 940.951044] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 940.965552] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 940.966051] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 6d3525fce6774cdf85313eaaacb1af13 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 940.971424] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d3525fce6774cdf85313eaaacb1af13 [ 941.016357] env[61273]: DEBUG nova.scheduler.client.report [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 941.018713] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg 001fbc92923946089c52c8d75ada57ad in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 941.028840] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 001fbc92923946089c52c8d75ada57ad [ 941.386407] env[61273]: DEBUG oslo_concurrency.lockutils [None req-a2ff395f-a2bd-4329-8796-92c04a0ca5ac tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "77ca81dc-6322-41de-aaee-adf36d6ce79f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.319s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.468254] env[61273]: DEBUG nova.network.neutron [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] 
[instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.468856] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg a1fceec4ad17440988b5785479f1b047 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 941.477093] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1fceec4ad17440988b5785479f1b047 [ 941.521148] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.670s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.521565] env[61273]: ERROR nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port becb8351-4f8e-4d86-870c-e7e66f60b598, please check neutron logs for more information. [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] Traceback (most recent call last): [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] self.driver.spawn(context, instance, image_meta, [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] self._vmops.spawn(context, instance, image_meta, injected_files, [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] vm_ref = self.build_virtual_machine(instance, [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] vif_infos = vmwarevif.get_vif_info(self._session, [ 941.521565] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] for vif in network_info: [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 941.521967] env[61273]: ERROR nova.compute.manager 
[instance: 2178d195-47f4-47ab-9140-b8f849973434] return self._sync_wrapper(fn, *args, **kwargs) [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] self.wait() [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] self[:] = self._gt.wait() [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] return self._exit_event.wait() [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] current.throw(*self._exc) [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 941.521967] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] result = function(*args, **kwargs) [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] return func(*args, **kwargs) [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] raise e [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] nwinfo = self.network_api.allocate_for_instance( [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] created_port_ids = self._update_ports_for_instance( [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] with excutils.save_and_reraise_exception(): [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 941.522396] env[61273]: ERROR nova.compute.manager 
[instance: 2178d195-47f4-47ab-9140-b8f849973434] self.force_reraise() [ 941.522396] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 941.522827] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] raise self.value [ 941.522827] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 941.522827] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] updated_port = self._update_port( [ 941.522827] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 941.522827] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] _ensure_no_port_binding_failure(port) [ 941.522827] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 941.522827] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] raise exception.PortBindingFailed(port_id=port['id']) [ 941.522827] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] nova.exception.PortBindingFailed: Binding failed for port becb8351-4f8e-4d86-870c-e7e66f60b598, please check neutron logs for more information. [ 941.522827] env[61273]: ERROR nova.compute.manager [instance: 2178d195-47f4-47ab-9140-b8f849973434] [ 941.522827] env[61273]: DEBUG nova.compute.utils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Binding failed for port becb8351-4f8e-4d86-870c-e7e66f60b598, please check neutron logs for more information. 
{{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 941.523529] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.466s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.523703] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.523854] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61273) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 941.524164] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.164s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.525629] env[61273]: INFO nova.compute.claims [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.527142] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 9bdbdadcdfdd436f833433b90ca93c2d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 941.528651] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Build of instance 2178d195-47f4-47ab-9140-b8f849973434 was re-scheduled: Binding failed for port becb8351-4f8e-4d86-870c-e7e66f60b598, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 941.529061] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 941.529282] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Acquiring lock "refresh_cache-2178d195-47f4-47ab-9140-b8f849973434" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.529422] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Acquired lock "refresh_cache-2178d195-47f4-47ab-9140-b8f849973434" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.529576] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 941.529932] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg c4a4129d389e496a9a17ca0e8950e65e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 941.531122] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69aa30d-68ef-436a-a44b-b0724aaa1cf8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.537206] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4a4129d389e496a9a17ca0e8950e65e [ 941.541299] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eed828-d551-4edf-8862-3c273a036973 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.556889] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8586e5-4a03-452c-a743-fb7a63626443 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.563476] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4fe4ff-5182-4d2b-91b5-fa8709a5ab99 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.567727] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bdbdadcdfdd436f833433b90ca93c2d [ 941.593196] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181714MB 
free_disk=141GB free_vcpus=48 pci_devices=None {{(pid=61273) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 941.593361] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.971073] env[61273]: INFO nova.compute.manager [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: d10134a3-6f70-4f00-b810-371e17d2a1ab] Took 1.02 seconds to deallocate network for instance. [ 941.972985] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 417e29900af54bb2bac0f9959c86f124 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 942.012432] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 417e29900af54bb2bac0f9959c86f124 [ 942.036033] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 9dcb31d085754815bfcd7475190d8a62 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 942.044240] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dcb31d085754815bfcd7475190d8a62 [ 942.052249] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 942.131826] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.132334] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg 00074168644b42f680fb7afed3acc854 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 942.139832] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00074168644b42f680fb7afed3acc854 [ 942.477964] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 2a725bfe203a4060bdaa2f7b38402cce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 942.508890] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "a007a7bc-b898-472b-9469-e6dd6262ea0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.509127] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "a007a7bc-b898-472b-9469-e6dd6262ea0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.509581] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg d9e115e0f196451aa4652a12ccdc0d81 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 942.519722] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a725bfe203a4060bdaa2f7b38402cce [ 942.520336] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9e115e0f196451aa4652a12ccdc0d81 [ 942.628317] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0e8b90-3719-46dc-84fd-89283f2904c4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.635430] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Releasing lock "refresh_cache-2178d195-47f4-47ab-9140-b8f849973434" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.635646] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 
tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 942.635929] env[61273]: DEBUG nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 942.636144] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.638376] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efbec5ac-ab39-4b73-bd9b-30a1ac3640b3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.667687] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 942.668297] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg e6da142e6f2c4d25ba9e6a814b5612d7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 942.669503] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74891e62-aee9-4ef1-a95a-d3c3b8a9a857 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.675976] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6da142e6f2c4d25ba9e6a814b5612d7 [ 942.677308] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5886bc6c-bfd6-49f1-9fc7-7a2fd38fdb98 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.690477] env[61273]: DEBUG nova.compute.provider_tree [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.690932] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 9ba26e58f9b44eb9a103a7124d578445 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 942.698462] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
9ba26e58f9b44eb9a103a7124d578445 [ 942.997181] env[61273]: INFO nova.scheduler.client.report [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Deleted allocations for instance d10134a3-6f70-4f00-b810-371e17d2a1ab [ 943.003341] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 896ddbaf3b70492cac752a597b660df7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 943.010984] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 943.012684] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg a77aba4c20b742b680d7f7782b0ff44d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 943.020306] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 896ddbaf3b70492cac752a597b660df7 [ 943.051799] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a77aba4c20b742b680d7f7782b0ff44d [ 943.173388] env[61273]: DEBUG nova.network.neutron [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.173885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg e233a5858a8c40b9a3380f2908c02fbf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 943.181603] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e233a5858a8c40b9a3380f2908c02fbf [ 943.192656] env[61273]: DEBUG nova.scheduler.client.report [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.194878] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 198910498a7a4fed9ead8761e7042e36 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 943.207443] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 198910498a7a4fed9ead8761e7042e36 [ 943.515803] env[61273]: DEBUG oslo_concurrency.lockutils [None req-5f94bb74-5383-4919-98dc-8ce9ab823825 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "d10134a3-6f70-4f00-b810-371e17d2a1ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.238s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.530183] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.676558] env[61273]: INFO nova.compute.manager [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] [instance: 2178d195-47f4-47ab-9140-b8f849973434] Took 1.04 seconds to deallocate network for instance. [ 943.678287] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg d5813a716ab14fefa8f11d8ce8813823 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 943.697069] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.173s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.697588] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 943.699252] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 6e5c701accd248f1bd71323e8cfff5ff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 943.700187] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 4.888s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.702113] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg c371c95562a241babc3086f27d4a5b5e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 943.708608] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5813a716ab14fefa8f11d8ce8813823 [ 943.727920] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e5c701accd248f1bd71323e8cfff5ff [ 943.738136] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c371c95562a241babc3086f27d4a5b5e [ 944.183885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg b4eff2ed8ae040dfa926696e27be6dbb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 944.204766] env[61273]: DEBUG nova.compute.utils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 944.205430] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 0371ce57fb234e9c9f874daf424720b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 944.206394] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 944.206571] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 944.215267] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0371ce57fb234e9c9f874daf424720b0 [ 944.221946] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4eff2ed8ae040dfa926696e27be6dbb [ 944.266967] env[61273]: DEBUG nova.policy [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3078a2af81b248f8b100f58ee66a5a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c06b50a7aaa742afbbd0c6fc56c3d131', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 944.308594] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6762600b-eebe-4c4e-9c69-7bcda56e2a2c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.316693] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f05bdf-3cb7-4ac8-9de5-daf79b7f828a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.350169] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f8bb82-900f-4109-976f-a1b1d5e12703 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.357696] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0945a22-230f-4354-86ff-e76a374f1e5a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.371029] env[61273]: DEBUG nova.compute.provider_tree [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.371548] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 58f2122ebf8345579881e5e32ab865fc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 944.379069] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58f2122ebf8345579881e5e32ab865fc [ 944.642862] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 
tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Successfully created port: c5cb1417-be25-47df-b3cd-5ad25361a83b {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 944.709996] env[61273]: INFO nova.scheduler.client.report [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Deleted allocations for instance 2178d195-47f4-47ab-9140-b8f849973434 [ 944.716091] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 944.717598] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg a84f30e7f46e4fcea48cdf7cbe99368b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 944.721171] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Expecting reply to msg 3862fca61a3044ff80b05a9c894c1323 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 944.745417] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3862fca61a3044ff80b05a9c894c1323 [ 944.752693] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a84f30e7f46e4fcea48cdf7cbe99368b [ 944.874323] env[61273]: DEBUG nova.scheduler.client.report [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 944.876806] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 2eba2e77f31c4b3cb45dc177ec6558ab in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 944.889966] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2eba2e77f31c4b3cb45dc177ec6558ab [ 945.155381] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "c24218f8-6989-4e2f-88b4-f4421b66ec3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.155815] env[61273]: DEBUG 
oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "c24218f8-6989-4e2f-88b4-f4421b66ec3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.156299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg bc81d6a5db6f41c18caf40ae83d2d0ef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 945.165765] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc81d6a5db6f41c18caf40ae83d2d0ef [ 945.222900] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg cfb1566508c14ee1b787760594cb1efd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 945.224371] env[61273]: DEBUG oslo_concurrency.lockutils [None req-37c28f3f-2699-4617-9df4-f00a03d7fcd8 tempest-ServerRescueTestJSONUnderV235-2053171794 tempest-ServerRescueTestJSONUnderV235-2053171794-project-member] Lock "2178d195-47f4-47ab-9140-b8f849973434" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.877s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.258801] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfb1566508c14ee1b787760594cb1efd [ 945.268841] env[61273]: DEBUG nova.compute.manager [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Received event network-changed-c5cb1417-be25-47df-b3cd-5ad25361a83b {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 945.269047] env[61273]: DEBUG nova.compute.manager [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Refreshing instance network info cache due to event network-changed-c5cb1417-be25-47df-b3cd-5ad25361a83b. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 945.269262] env[61273]: DEBUG oslo_concurrency.lockutils [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] Acquiring lock "refresh_cache-4196c9a3-53d2-4a6d-b944-813be319cbdd" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.269402] env[61273]: DEBUG oslo_concurrency.lockutils [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] Acquired lock "refresh_cache-4196c9a3-53d2-4a6d-b944-813be319cbdd" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.269545] env[61273]: DEBUG nova.network.neutron [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Refreshing network info cache for port c5cb1417-be25-47df-b3cd-5ad25361a83b {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 945.269966] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] Expecting reply to msg 713942b4de774f1db19b5ac15dab8f55 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 945.276448] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 713942b4de774f1db19b5ac15dab8f55 [ 945.383900] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.684s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.384566] env[61273]: ERROR nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 34c92d02-9079-4f11-a58a-625d44ed4cf1, please check neutron logs for more information. 
[ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] Traceback (most recent call last): [ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self.driver.spawn(context, instance, image_meta, [ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] vm_ref = self.build_virtual_machine(instance, [ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] vif_infos = vmwarevif.get_vif_info(self._session, [ 945.384566] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] for vif in network_info: [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] return self._sync_wrapper(fn, *args, **kwargs) [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self.wait() [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self[:] = self._gt.wait() [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] return self._exit_event.wait() [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] current.throw(*self._exc) [ 945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
945.384986] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] result = function(*args, **kwargs) [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] return func(*args, **kwargs) [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] raise e [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] nwinfo = self.network_api.allocate_for_instance( [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] created_port_ids = self._update_ports_for_instance( [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] with excutils.save_and_reraise_exception(): [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] self.force_reraise() [ 945.385426] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 945.385863] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] raise self.value [ 945.385863] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 945.385863] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] updated_port = self._update_port( [ 945.385863] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 945.385863] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] _ensure_no_port_binding_failure(port) [ 945.385863] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 945.385863] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] raise exception.PortBindingFailed(port_id=port['id']) [ 945.385863] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] nova.exception.PortBindingFailed: Binding failed for 
port 34c92d02-9079-4f11-a58a-625d44ed4cf1, please check neutron logs for more information. [ 945.385863] env[61273]: ERROR nova.compute.manager [instance: 80491222-910b-48e1-a431-3116c336a9a5] [ 945.385863] env[61273]: DEBUG nova.compute.utils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Binding failed for port 34c92d02-9079-4f11-a58a-625d44ed4cf1, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 945.386664] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.062s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.388139] env[61273]: INFO nova.compute.claims [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.389803] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 48fdbe84804f4086bda0205f98a13cd9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 945.391253] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Build of instance 80491222-910b-48e1-a431-3116c336a9a5 was re-scheduled: Binding failed for port 34c92d02-9079-4f11-a58a-625d44ed4cf1, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 945.391675] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 945.391917] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquiring lock "refresh_cache-80491222-910b-48e1-a431-3116c336a9a5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.392074] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Acquired lock "refresh_cache-80491222-910b-48e1-a431-3116c336a9a5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.392233] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.392600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg ebe656b228ce4a8daf5d7c706523953c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 945.407805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebe656b228ce4a8daf5d7c706523953c [ 945.429079] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48fdbe84804f4086bda0205f98a13cd9 [ 945.472511] env[61273]: ERROR nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c5cb1417-be25-47df-b3cd-5ad25361a83b, please check neutron logs for more information. 
[ 945.472511] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 945.472511] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 945.472511] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 945.472511] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 945.472511] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 945.472511] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 945.472511] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 945.472511] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 945.472511] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 945.472511] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 945.472511] env[61273]: ERROR nova.compute.manager raise self.value [ 945.472511] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 945.472511] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 945.472511] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 945.472511] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 945.473117] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 945.473117] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 945.473117] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c5cb1417-be25-47df-b3cd-5ad25361a83b, please check neutron logs for more information. 
[ 945.473117] env[61273]: ERROR nova.compute.manager [ 945.473117] env[61273]: Traceback (most recent call last): [ 945.473117] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 945.473117] env[61273]: listener.cb(fileno) [ 945.473117] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 945.473117] env[61273]: result = function(*args, **kwargs) [ 945.473117] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 945.473117] env[61273]: return func(*args, **kwargs) [ 945.473117] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 945.473117] env[61273]: raise e [ 945.473117] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 945.473117] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 945.473117] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 945.473117] env[61273]: created_port_ids = self._update_ports_for_instance( [ 945.473117] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 945.473117] env[61273]: with excutils.save_and_reraise_exception(): [ 945.473117] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 945.473117] env[61273]: self.force_reraise() [ 945.473117] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 945.473117] env[61273]: raise self.value [ 945.473117] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 945.473117] env[61273]: updated_port = self._update_port( [ 945.473117] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 945.473117] env[61273]: _ensure_no_port_binding_failure(port) [ 945.473117] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 945.473117] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 945.474123] env[61273]: nova.exception.PortBindingFailed: Binding failed for port c5cb1417-be25-47df-b3cd-5ad25361a83b, please check neutron logs for more information. [ 945.474123] env[61273]: Removing descriptor: 19 [ 945.658051] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 945.659816] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 990d95cb4ea9423896ba3766c6d3fcb8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 945.690062] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 990d95cb4ea9423896ba3766c6d3fcb8 [ 945.726455] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 945.751892] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 945.752155] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 945.752312] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.752938] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 945.753158] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.753313] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 945.753519] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 945.753675] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 945.753876] env[61273]: DEBUG nova.virt.hardware [None 
req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 945.754062] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 945.754231] env[61273]: DEBUG nova.virt.hardware [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 945.755074] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af41d808-0577-464f-8d74-029235fb26c1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.765033] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed9197a-6a0d-415d-9381-991c4e65d9e4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.784427] env[61273]: ERROR nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c5cb1417-be25-47df-b3cd-5ad25361a83b, please check neutron logs for more information. 
[ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Traceback (most recent call last): [ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] yield resources [ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self.driver.spawn(context, instance, image_meta, [ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] vm_ref = self.build_virtual_machine(instance, [ 945.784427] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] vif_infos = vmwarevif.get_vif_info(self._session, [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] for vif in network_info: [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] return self._sync_wrapper(fn, *args, **kwargs) [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self.wait() [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self[:] = self._gt.wait() [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] return self._exit_event.wait() [ 945.784914] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 945.784914] env[61273]: ERROR 
nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] current.throw(*self._exc) [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] result = function(*args, **kwargs) [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] return func(*args, **kwargs) [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] raise e [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] nwinfo = self.network_api.allocate_for_instance( [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] created_port_ids = self._update_ports_for_instance( [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] with excutils.save_and_reraise_exception(): [ 945.785359] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self.force_reraise() [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] raise self.value [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] updated_port = self._update_port( [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] _ensure_no_port_binding_failure(port) [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] raise exception.PortBindingFailed(port_id=port['id']) [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] nova.exception.PortBindingFailed: Binding failed for port c5cb1417-be25-47df-b3cd-5ad25361a83b, please check neutron logs for more information. [ 945.785819] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] [ 945.785819] env[61273]: INFO nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Terminating instance [ 945.786687] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-4196c9a3-53d2-4a6d-b944-813be319cbdd" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.789258] env[61273]: DEBUG nova.network.neutron [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.857933] env[61273]: DEBUG nova.network.neutron [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.858475] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] Expecting reply to msg 7b9c7293a6ce4b469836ab5fd6d5b311 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 945.866068] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b9c7293a6ce4b469836ab5fd6d5b311 [ 945.895698] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 61ff4973219a4a96ba51c109b6165b9c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 945.905752] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61ff4973219a4a96ba51c109b6165b9c [ 945.922010] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 946.004465] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.004465] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 54687d72c70c4f2eb02bb016efce2e18 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 946.015274] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54687d72c70c4f2eb02bb016efce2e18 [ 946.185684] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.360650] env[61273]: DEBUG oslo_concurrency.lockutils [req-0708b6cb-d07a-41c9-aa53-f121a398de5c req-552b95f5-1279-4e23-a2ad-2c78eac302f3 service nova] Releasing lock "refresh_cache-4196c9a3-53d2-4a6d-b944-813be319cbdd" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.361054] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-4196c9a3-53d2-4a6d-b944-813be319cbdd" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.361241] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 946.361666] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 723b27f1cec3449786dfd2b3175f99f1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 946.369115] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 723b27f1cec3449786dfd2b3175f99f1 [ 946.483356] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e30a1f6-a081-48e3-b6e4-9af3a7d20380 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.487217] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393d8cae-9e76-44c8-9ff0-452b050b6d25 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.516396] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b 
tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Releasing lock "refresh_cache-80491222-910b-48e1-a431-3116c336a9a5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.516622] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 946.516808] env[61273]: DEBUG nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 946.516975] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 946.519274] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3f6e4a-7202-4901-9ffd-f4c80675404e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.526414] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260b19e7-9732-4499-94c6-7a01eb691200 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.538977] env[61273]: DEBUG nova.compute.provider_tree [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.539453] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 9d8fe4c207144a3ca765602bfeab7b32 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 946.540769] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 946.541256] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 8a0b17188da0490fb0b2223df85c2f97 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 946.546744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d8fe4c207144a3ca765602bfeab7b32 [ 946.548334] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a0b17188da0490fb0b2223df85c2f97 [ 946.892430] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 946.990490] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.991100] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 5db2b8ce3f724723ad25628b1e3d0b95 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 946.999302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5db2b8ce3f724723ad25628b1e3d0b95 [ 947.043886] env[61273]: DEBUG nova.scheduler.client.report [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 947.046365] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 77e29fb306e4410db990536aa9e7005b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 947.047344] env[61273]: DEBUG nova.network.neutron [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.047734] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] 
Expecting reply to msg 5ab273edeb06473aa2b8ed3538c0557a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 947.055427] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ab273edeb06473aa2b8ed3538c0557a [ 947.060424] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77e29fb306e4410db990536aa9e7005b [ 947.298111] env[61273]: DEBUG nova.compute.manager [req-404eb4ca-f420-4f24-aeaa-0462f9b14fbd req-d03ecac6-d5df-4657-85b7-fba0f2b6ca39 service nova] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Received event network-vif-deleted-c5cb1417-be25-47df-b3cd-5ad25361a83b {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 947.493383] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-4196c9a3-53d2-4a6d-b944-813be319cbdd" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.493746] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 947.493945] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 947.494249] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0dca637-c6bd-4d5b-b2aa-f4981302b7ad {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.502932] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eafdaa7-6aeb-4945-be1d-0468507d71d6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.522910] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4196c9a3-53d2-4a6d-b944-813be319cbdd could not be found. [ 947.523105] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 947.523282] env[61273]: INFO nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Took 0.03 seconds to destroy the instance on the hypervisor. 
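
The destroy sequence just above (SearchIndex.FindAllByUuid, the WARNING "Instance does not exist on backend: nova.exception.InstanceNotFound", then "Instance destroyed" in 0.03 seconds) illustrates a best-effort teardown: when spawn failed before the VM was ever created on vCenter, the lookup raises InstanceNotFound and destroy treats that as already done. The following is a minimal illustrative sketch of that pattern only, not Nova's actual vmops code; the session helpers are hypothetical names.

```python
# Illustrative sketch of the "destroy tolerates a missing backend VM" pattern
# seen in the log above. Not Nova's real implementation; find_vm_by_uuid and
# power_off_and_delete are hypothetical helpers on an assumed session object.
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(session, instance_uuid):
    """Best-effort destroy: a VM that never reached the backend is a no-op."""
    try:
        vm_ref = session.find_vm_by_uuid(instance_uuid)   # hypothetical helper
        session.power_off_and_delete(vm_ref)              # hypothetical helper
    except InstanceNotFound:
        # Mirrors the WARNING above: spawn failed before VM creation, so there
        # is nothing to tear down on the hypervisor side.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.debug("Instance destroyed")
```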
[ 947.523512] env[61273]: DEBUG oslo.service.loopingcall [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.523724] env[61273]: DEBUG nova.compute.manager [-] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 947.523815] env[61273]: DEBUG nova.network.neutron [-] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 947.537675] env[61273]: DEBUG nova.network.neutron [-] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 947.538133] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bbb41b6452ae4fb69a0b23fab0c51ba5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 947.545018] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbb41b6452ae4fb69a0b23fab0c51ba5 [ 947.549338] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.163s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.549835] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 947.551582] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg c97d187eff8e4e9eb5003c5992c0342a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 947.553458] env[61273]: INFO nova.compute.manager [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] [instance: 80491222-910b-48e1-a431-3116c336a9a5] Took 1.04 seconds to deallocate network for instance. 
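
Throughout this section the lockutils lines ("Lock \"compute_resources\" acquired by ... :: waited 7.062s", "released ... :: held 2.163s") come from oslo.concurrency's lock wrapper serializing the resource tracker's claim and abort paths. The sketch below shows the general decorator pattern that produces such log lines, assuming oslo.concurrency is installed; the class and method bodies are illustrative stand-ins, not Nova's ResourceTracker.

```python
# Minimal sketch of the named-lock pattern behind the "compute_resources"
# waited/held lines in the log; the timing messages are emitted by the
# decorator's own wrapper (lockutils.py "inner"). Requires oslo.concurrency.
from oslo_concurrency import lockutils


class ResourceTrackerSketch:
    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance):
        # Serialized against abort_instance_claim below: a claim and an abort
        # for different instances still queue on the same named lock, which is
        # why the log shows multi-second "waited" values under load.
        return {'instance': instance, 'claimed': True}

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(self, instance):
        # Runs under the same lock, so aborts wait for in-flight claims to
        # finish before releasing the instance's resources.
        pass
```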
[ 947.555079] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 2a0e0ab5da9a44a3a7472a16fad625af in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 947.555924] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.062s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.557696] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg d35e23faff6b42c0ae8a2cd7504fe9a5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 947.591633] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a0e0ab5da9a44a3a7472a16fad625af [ 947.594510] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c97d187eff8e4e9eb5003c5992c0342a [ 947.604335] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d35e23faff6b42c0ae8a2cd7504fe9a5 [ 948.040736] env[61273]: DEBUG nova.network.neutron [-] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.041250] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a02a54a82dc94c3e861d9c3170bc2177 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 948.049233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a02a54a82dc94c3e861d9c3170bc2177 [ 948.061092] env[61273]: DEBUG nova.compute.utils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 948.061725] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 842b486344e3444798c7c12dafd8c78c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 948.064593] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 1e008358fe164d948f976c5162ad629d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 948.068749] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 948.069118] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 948.070926] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 842b486344e3444798c7c12dafd8c78c [ 948.104534] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e008358fe164d948f976c5162ad629d [ 948.132749] env[61273]: DEBUG nova.policy [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5345fbbca90446719473829e2ea02386', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a36723dc732b444e8831b049e9f804b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 948.156950] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7723c936-b416-4438-be0a-9f412de82f76 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.164766] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb683a1c-88e1-4b88-8e08-788c1357fde3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.197180] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e6d269-cb24-4515-bb48-d6c71e920eac {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.205324] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce423b7-b29f-4210-a745-ee52945b8fc8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.218270] env[61273]: DEBUG nova.compute.provider_tree [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.218748] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 8aea8a71457e4925a88ee338bc928ba4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 948.225914] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8aea8a71457e4925a88ee338bc928ba4 [ 948.431511] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 
tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Successfully created port: bc0d1c65-eaa1-4791-8213-aad49456dbfb {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 948.543408] env[61273]: INFO nova.compute.manager [-] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Took 1.02 seconds to deallocate network for instance. [ 948.545823] env[61273]: DEBUG nova.compute.claims [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 948.545935] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.572510] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 948.574311] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg c91ccc750b77447cad80bb12446f84c8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 948.593521] env[61273]: INFO nova.scheduler.client.report [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Deleted allocations for instance 80491222-910b-48e1-a431-3116c336a9a5 [ 948.603624] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Expecting reply to msg 02140a0d0288435c8cd5e0962b32370f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 948.616284] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c91ccc750b77447cad80bb12446f84c8 [ 948.621292] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02140a0d0288435c8cd5e0962b32370f [ 948.721622] env[61273]: DEBUG nova.scheduler.client.report [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 948.724202] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 5aacfe149c0b498a8eb0f8f61e894557 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 948.736515] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aacfe149c0b498a8eb0f8f61e894557 [ 949.080479] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg ff95d5732c354c989257f8aa1810701d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 949.105500] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e98086e1-3b53-4393-9b50-74f1a5099f1b tempest-AttachInterfacesTestJSON-669022114 tempest-AttachInterfacesTestJSON-669022114-project-member] Lock "80491222-910b-48e1-a431-3116c336a9a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.300s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.117996] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff95d5732c354c989257f8aa1810701d [ 949.227660] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.670s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.227660] env[61273]: ERROR nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cbe637dd-c959-40d2-af09-5b04853ed211, please check neutron logs for more information. 
[ 949.227660] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Traceback (most recent call last): [ 949.227660] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 949.227660] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self.driver.spawn(context, instance, image_meta, [ 949.227660] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 949.227660] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 949.227660] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 949.227660] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] vm_ref = self.build_virtual_machine(instance, [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] vif_infos = vmwarevif.get_vif_info(self._session, [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] for vif in network_info: [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] return self._sync_wrapper(fn, *args, **kwargs) [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self.wait() [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self[:] = self._gt.wait() [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] return self._exit_event.wait() [ 949.228020] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] current.throw(*self._exc) [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] result = function(*args, **kwargs) [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] return func(*args, **kwargs) [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] raise e [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] nwinfo = self.network_api.allocate_for_instance( [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] created_port_ids = self._update_ports_for_instance( [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 949.228444] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] with excutils.save_and_reraise_exception(): [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] self.force_reraise() [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] raise self.value [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] updated_port = self._update_port( [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] _ensure_no_port_binding_failure(port) [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] raise exception.PortBindingFailed(port_id=port['id']) [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] nova.exception.PortBindingFailed: Binding failed for 
port cbe637dd-c959-40d2-af09-5b04853ed211, please check neutron logs for more information. [ 949.228865] env[61273]: ERROR nova.compute.manager [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] [ 949.229217] env[61273]: DEBUG nova.compute.utils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Binding failed for port cbe637dd-c959-40d2-af09-5b04853ed211, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 949.229217] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.636s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.229992] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 92563c95701848979896fc86575c340a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 949.230988] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Build of instance ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f was re-scheduled: Binding failed for port cbe637dd-c959-40d2-af09-5b04853ed211, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 949.231409] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 949.231663] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquiring lock "refresh_cache-ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.231861] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Acquired lock "refresh_cache-ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.232054] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 949.232428] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg ec3fbdc192f94b9c81409c6d54446ced in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 949.240459] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec3fbdc192f94b9c81409c6d54446ced [ 949.244484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92563c95701848979896fc86575c340a [ 949.332906] env[61273]: DEBUG nova.compute.manager [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Received event network-changed-bc0d1c65-eaa1-4791-8213-aad49456dbfb {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 949.333100] env[61273]: DEBUG nova.compute.manager [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Refreshing instance network info cache due to event network-changed-bc0d1c65-eaa1-4791-8213-aad49456dbfb. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 949.333308] env[61273]: DEBUG oslo_concurrency.lockutils [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] Acquiring lock "refresh_cache-57a60fa5-be59-48b8-b72a-6f7d945ba821" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.333443] env[61273]: DEBUG oslo_concurrency.lockutils [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] Acquired lock "refresh_cache-57a60fa5-be59-48b8-b72a-6f7d945ba821" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.333602] env[61273]: DEBUG nova.network.neutron [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Refreshing network info cache for port bc0d1c65-eaa1-4791-8213-aad49456dbfb {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 949.334031] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] Expecting reply to msg 69e31c8472bf4e90afd68fb1b612f484 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 949.341212] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69e31c8472bf4e90afd68fb1b612f484 [ 949.463802] env[61273]: ERROR nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bc0d1c65-eaa1-4791-8213-aad49456dbfb, please check neutron logs for more information. 
[ 949.463802] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 949.463802] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 949.463802] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 949.463802] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 949.463802] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 949.463802] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 949.463802] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 949.463802] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 949.463802] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 949.463802] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 949.463802] env[61273]: ERROR nova.compute.manager raise self.value [ 949.463802] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 949.463802] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 949.463802] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 949.463802] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 949.464343] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 949.464343] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 949.464343] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bc0d1c65-eaa1-4791-8213-aad49456dbfb, please check neutron logs for more information. 
[ 949.464343] env[61273]: ERROR nova.compute.manager [ 949.464343] env[61273]: Traceback (most recent call last): [ 949.464343] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 949.464343] env[61273]: listener.cb(fileno) [ 949.464343] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 949.464343] env[61273]: result = function(*args, **kwargs) [ 949.464343] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 949.464343] env[61273]: return func(*args, **kwargs) [ 949.464343] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 949.464343] env[61273]: raise e [ 949.464343] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 949.464343] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 949.464343] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 949.464343] env[61273]: created_port_ids = self._update_ports_for_instance( [ 949.464343] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 949.464343] env[61273]: with excutils.save_and_reraise_exception(): [ 949.464343] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 949.464343] env[61273]: self.force_reraise() [ 949.464343] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 949.464343] env[61273]: raise self.value [ 949.464343] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 949.464343] env[61273]: updated_port = self._update_port( [ 949.464343] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 949.464343] env[61273]: _ensure_no_port_binding_failure(port) [ 949.464343] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 949.464343] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 949.465179] env[61273]: nova.exception.PortBindingFailed: Binding failed for port bc0d1c65-eaa1-4791-8213-aad49456dbfb, please check neutron logs for more information. [ 949.465179] env[61273]: Removing descriptor: 19 [ 949.584163] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 949.607536] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 949.608267] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 949.608363] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.608552] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 949.608700] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.608848] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 949.609121] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 949.609235] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 949.609397] env[61273]: DEBUG nova.virt.hardware [None 
req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 949.609851] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 949.610053] env[61273]: DEBUG nova.virt.hardware [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 949.611157] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a040da-3235-48ea-93f9-17c8736634f7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.620727] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f539a131-cb9c-4d49-b98c-0e72c04ee296 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.637701] env[61273]: ERROR nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bc0d1c65-eaa1-4791-8213-aad49456dbfb, please check neutron logs for more information. 
[ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Traceback (most recent call last): [ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] yield resources [ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self.driver.spawn(context, instance, image_meta, [ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self._vmops.spawn(context, instance, image_meta, injected_files, [ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] vm_ref = self.build_virtual_machine(instance, [ 949.637701] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] vif_infos = vmwarevif.get_vif_info(self._session, [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] for vif in network_info: [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] return self._sync_wrapper(fn, *args, **kwargs) [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self.wait() [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self[:] = self._gt.wait() [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] return self._exit_event.wait() [ 949.638229] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 949.638229] env[61273]: ERROR 
nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] current.throw(*self._exc) [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] result = function(*args, **kwargs) [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] return func(*args, **kwargs) [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] raise e [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] nwinfo = self.network_api.allocate_for_instance( [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] created_port_ids = self._update_ports_for_instance( [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] with excutils.save_and_reraise_exception(): [ 949.638811] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self.force_reraise() [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] raise self.value [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] updated_port = self._update_port( [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] _ensure_no_port_binding_failure(port) [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] raise exception.PortBindingFailed(port_id=port['id']) [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] nova.exception.PortBindingFailed: Binding failed for port bc0d1c65-eaa1-4791-8213-aad49456dbfb, please check neutron logs for more information. [ 949.639401] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] [ 949.639401] env[61273]: INFO nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Terminating instance [ 949.640359] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "refresh_cache-57a60fa5-be59-48b8-b72a-6f7d945ba821" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.735154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 497c9806c7b940d59eef2af90a5a3def in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 949.757305] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 949.769501] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 497c9806c7b940d59eef2af90a5a3def [ 949.820294] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.820976] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg f9d4175a66f54a8ba06c8b4cff794243 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 949.829333] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9d4175a66f54a8ba06c8b4cff794243 [ 949.852257] env[61273]: DEBUG nova.network.neutron [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 949.928165] env[61273]: DEBUG nova.network.neutron [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.928835] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] Expecting reply to msg 79c160fa09b34efd952fca24c67545d8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 949.936618] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79c160fa09b34efd952fca24c67545d8 [ 950.260355] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg a7c94114b3484474920d4e3f1d57a049 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 950.269285] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7c94114b3484474920d4e3f1d57a049 [ 950.323038] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Releasing lock "refresh_cache-ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.323454] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 950.323865] env[61273]: DEBUG nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 950.324239] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 950.339413] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 950.340110] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 87bc1651b1314001855e62b85f616e3f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 950.348100] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87bc1651b1314001855e62b85f616e3f [ 950.431608] env[61273]: DEBUG oslo_concurrency.lockutils [req-46a1dc93-0054-428f-8a3e-e2691a729e43 req-d2f3b8fd-db1c-4bd2-961b-e051277b2b97 service nova] Releasing lock "refresh_cache-57a60fa5-be59-48b8-b72a-6f7d945ba821" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.432095] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquired lock "refresh_cache-57a60fa5-be59-48b8-b72a-6f7d945ba821" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.432288] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 950.432739] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg c87d2a2637ec4b2d98ad92417443d3cf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 950.439529] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c87d2a2637ec4b2d98ad92417443d3cf [ 950.765681] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 950.765681] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 4196c9a3-53d2-4a6d-b944-813be319cbdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 950.765681] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 57a60fa5-be59-48b8-b72a-6f7d945ba821 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 950.765681] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 9c25e198cf0e475585e30d4531d328e9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 950.775536] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c25e198cf0e475585e30d4531d328e9 [ 950.967454] env[61273]: DEBUG nova.network.neutron [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.967454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg ea257e87f947473bab1e81e9ca74fc76 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 950.967454] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea257e87f947473bab1e81e9ca74fc76 [ 950.967454] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 951.057274] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.057848] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg fda0db6d57e5462293f5a2ac41f2f91b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 951.066398] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fda0db6d57e5462293f5a2ac41f2f91b [ 951.268061] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance a007a7bc-b898-472b-9469-e6dd6262ea0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 951.268878] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 0453772e06ed4286bcb6ccbcf91fa19c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 951.281401] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0453772e06ed4286bcb6ccbcf91fa19c [ 951.351878] env[61273]: INFO nova.compute.manager [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] [instance: ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f] Took 1.03 seconds to deallocate network for instance. [ 951.353751] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 23eb923a851a4e20929375a9e5dc947e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 951.373671] env[61273]: DEBUG nova.compute.manager [req-e30182fe-fa52-47ab-8a47-e14095ec0860 req-ad830576-baa6-4e93-828d-661e637b7b73 service nova] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Received event network-vif-deleted-bc0d1c65-eaa1-4791-8213-aad49456dbfb {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 951.389894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23eb923a851a4e20929375a9e5dc947e [ 951.560184] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Releasing lock "refresh_cache-57a60fa5-be59-48b8-b72a-6f7d945ba821" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.560745] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 951.561036] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 951.561425] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f28be44-d209-4f97-bdf0-1eea60d3ec76 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.572931] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48bee17-85a5-412f-81fd-e4e2411952eb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.601660] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 57a60fa5-be59-48b8-b72a-6f7d945ba821 could not be found. [ 951.601985] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 951.602260] env[61273]: INFO nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Took 0.04 seconds to destroy the instance on the hypervisor. [ 951.602632] env[61273]: DEBUG oslo.service.loopingcall [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 951.602985] env[61273]: DEBUG nova.compute.manager [-] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 951.603144] env[61273]: DEBUG nova.network.neutron [-] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 951.624253] env[61273]: DEBUG nova.network.neutron [-] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 951.624648] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg df0202098b9741048c5541765bb8db6e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 951.631412] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df0202098b9741048c5541765bb8db6e [ 951.776045] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance c24218f8-6989-4e2f-88b4-f4421b66ec3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 951.776340] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 951.776424] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 951.859364] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg 0c97c32b25574aec92139908e9f91088 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 951.872892] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03c9d33-98ff-49dc-8130-d778c0201507 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.880610] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b41539-6ba6-483f-ad57-e147b7d160ac {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.910589] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c97c32b25574aec92139908e9f91088 [ 951.911781] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101a545f-c351-485b-b5a4-a6c41885daa4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.919536] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a423ad-b305-48d3-bf01-2ce7bc352c53 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.933358] env[61273]: DEBUG nova.compute.provider_tree [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.933805] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 
c2041a0830e64bc5b84d1c3bb2355f51 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 951.941151] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2041a0830e64bc5b84d1c3bb2355f51 [ 952.126413] env[61273]: DEBUG nova.network.neutron [-] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.126915] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1f02e2f4774240c8a2ea608116e7bc9e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 952.138008] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f02e2f4774240c8a2ea608116e7bc9e [ 952.386223] env[61273]: INFO nova.scheduler.client.report [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Deleted allocations for instance ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f [ 952.391769] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Expecting reply to msg aa296ace659348a9b5641e110dcbaef0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 952.407746] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa296ace659348a9b5641e110dcbaef0 [ 952.436092] env[61273]: DEBUG nova.scheduler.client.report [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 952.438388] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 24b13bf764a544c687794ab5f3ec0475 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 952.449122] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24b13bf764a544c687794ab5f3ec0475 [ 952.629141] env[61273]: INFO nova.compute.manager [-] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Took 1.03 seconds to deallocate network for instance. 
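For reference, a minimal sketch of the arithmetic behind the "Final resource view" reported above (used_ram=896MB, used_disk=2GB, used_vcpus=2). It assumes the two per-instance allocations of {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} that the resource tracker logged while skipping allocation heals, plus the 512 MB reserved memory shown in the inventory data; this is only an illustration of how the logged figures add up, not Nova's resource tracker code.

    # Illustrative arithmetic only -- reproduces the logged "Final resource view"
    # from the two claimed allocations and the reserved host memory above.
    allocations = [
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
    ]
    reserved_memory_mb = 512  # from 'MEMORY_MB': {'reserved': 512, ...}

    used_ram_mb = reserved_memory_mb + sum(a['MEMORY_MB'] for a in allocations)
    used_disk_gb = sum(a['DISK_GB'] for a in allocations)
    used_vcpus = sum(a['VCPU'] for a in allocations)

    print(used_ram_mb, used_disk_gb, used_vcpus)  # 896 2 2

In other words, the 896 MB figure is the 512 MB reserved host memory plus 2 x 192 MB of claimed instance memory, and the 2 GB / 2 vCPU figures are the sums of the two claims against the 48-vCPU, 400 GB provider inventory.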
[ 952.631462] env[61273]: DEBUG nova.compute.claims [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 952.631643] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.893604] env[61273]: DEBUG oslo_concurrency.lockutils [None req-60fc7659-482e-4c84-81ea-6c3b6fd0ca5e tempest-AttachVolumeNegativeTest-1501631824 tempest-AttachVolumeNegativeTest-1501631824-project-member] Lock "ef2a58d8-74ab-4049-94c3-0eb0d5bfee5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.803s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.940533] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61273) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 952.940752] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.712s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.941025] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.411s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.942570] env[61273]: INFO nova.compute.claims [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.944249] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 6a4aeca30aa443fcbc0eb81dfb09eeb6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 952.989237] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a4aeca30aa443fcbc0eb81dfb09eeb6 [ 953.447642] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 3e9553f88bf54db2ba868e7c2c60f6f2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 953.455790] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e9553f88bf54db2ba868e7c2c60f6f2 [ 954.016164] env[61273]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fd5ed6-6566-429a-a063-d5873589476a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.025098] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0feebf43-6e25-4084-98b3-0bcc173ae73f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.056749] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16042d18-b4d7-4847-b58a-682adab78e54 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.064356] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9107804e-678f-4943-bf3e-765cd2ec4fbb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.079704] env[61273]: DEBUG nova.compute.provider_tree [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.080211] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg a3c77f8317844b9f9111a93f1cf54759 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 954.087979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3c77f8317844b9f9111a93f1cf54759 [ 954.583296] env[61273]: DEBUG nova.scheduler.client.report [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 954.585724] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg c4d78b4f596540d9843b6eee6f88de40 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 954.593137] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Acquiring lock "c181e857-2fcf-4658-9e90-fff396a5eaf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.593341] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Lock 
"c181e857-2fcf-4658-9e90-fff396a5eaf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.593767] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 8c6e38adcf9d4bd3bb8069e1fa46b6d6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 954.603685] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4d78b4f596540d9843b6eee6f88de40 [ 954.604118] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c6e38adcf9d4bd3bb8069e1fa46b6d6 [ 955.098808] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.147s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.098808] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 955.098808] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg a0dada7c1ebc4ce1bd42b89e0b4146fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 955.098808] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.906s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.098808] env[61273]: INFO nova.compute.claims [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.099195] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 18688bcb74204380aad47c1ebb45ff68 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 955.099195] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 955.099195] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 4da1f1643c764c67bdc9db19b583a9db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 955.138860] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18688bcb74204380aad47c1ebb45ff68 [ 955.152305] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0dada7c1ebc4ce1bd42b89e0b4146fb [ 955.157179] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4da1f1643c764c67bdc9db19b583a9db [ 955.597656] env[61273]: DEBUG nova.compute.utils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 955.598376] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg ee8bc2cf93854a5a9a420bded501babe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 955.599319] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 955.599489] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 955.602538] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 96fe119e14ea4890b65e01aae1e48116 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 955.607886] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee8bc2cf93854a5a9a420bded501babe [ 955.610133] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96fe119e14ea4890b65e01aae1e48116 [ 955.622990] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.645750] env[61273]: DEBUG nova.policy [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af34c4e3d81c4729a9dd4a8531992ff1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9325f1def284d2a9fdced4e9eeb17f0', 
'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 955.887330] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Successfully created port: 34ca79ff-567b-43f8-bc85-994e4b2ce95a {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.109042] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 956.110972] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg bb0baa92cc3b4982b84e03370217cda3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 956.164285] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb0baa92cc3b4982b84e03370217cda3 [ 956.193457] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91ff4c5-8fa0-4f8c-a81a-a9f6ac1978ed {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.201210] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c18c96-f16e-4d4b-88f3-18cdb5f3c1e8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.230803] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5064ab18-f11c-4c18-89b5-d8a8ab2ad0dc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.238104] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7985b2c-f91d-4224-9a9b-219617cd9468 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.253579] env[61273]: DEBUG nova.compute.provider_tree [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.255505] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg e72884f379e64660b8b49720268cdee9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 956.264701] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e72884f379e64660b8b49720268cdee9 [ 956.617235] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 
tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 3ea26a84a3394e02a424b9b9805e9070 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 956.652345] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ea26a84a3394e02a424b9b9805e9070 [ 956.658138] env[61273]: DEBUG nova.compute.manager [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Received event network-changed-34ca79ff-567b-43f8-bc85-994e4b2ce95a {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 956.658315] env[61273]: DEBUG nova.compute.manager [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Refreshing instance network info cache due to event network-changed-34ca79ff-567b-43f8-bc85-994e4b2ce95a. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 956.658521] env[61273]: DEBUG oslo_concurrency.lockutils [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] Acquiring lock "refresh_cache-a007a7bc-b898-472b-9469-e6dd6262ea0b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.658656] env[61273]: DEBUG oslo_concurrency.lockutils [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] Acquired lock "refresh_cache-a007a7bc-b898-472b-9469-e6dd6262ea0b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.658821] env[61273]: DEBUG nova.network.neutron [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Refreshing network info cache for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 956.659221] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] Expecting reply to msg 47096ca6c8f94048b17f5f42e3d44ddd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 956.669283] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47096ca6c8f94048b17f5f42e3d44ddd [ 956.757751] env[61273]: DEBUG nova.scheduler.client.report [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 956.760439] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg ecb05722cf2746ac884fe122b44589df in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 956.801916] env[61273]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecb05722cf2746ac884fe122b44589df [ 956.884921] env[61273]: ERROR nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a, please check neutron logs for more information. [ 956.884921] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 956.884921] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 956.884921] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 956.884921] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 956.884921] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 956.884921] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 956.884921] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 956.884921] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 956.884921] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 956.884921] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 956.884921] env[61273]: ERROR nova.compute.manager raise self.value [ 956.884921] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 956.884921] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 956.884921] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 956.884921] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 956.885565] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 956.885565] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 956.885565] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a, please check neutron logs for more information. 
[ 956.885565] env[61273]: ERROR nova.compute.manager [ 956.885565] env[61273]: Traceback (most recent call last): [ 956.885565] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 956.885565] env[61273]: listener.cb(fileno) [ 956.885565] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 956.885565] env[61273]: result = function(*args, **kwargs) [ 956.885565] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 956.885565] env[61273]: return func(*args, **kwargs) [ 956.885565] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 956.885565] env[61273]: raise e [ 956.885565] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 956.885565] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 956.885565] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 956.885565] env[61273]: created_port_ids = self._update_ports_for_instance( [ 956.885565] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 956.885565] env[61273]: with excutils.save_and_reraise_exception(): [ 956.885565] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 956.885565] env[61273]: self.force_reraise() [ 956.885565] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 956.885565] env[61273]: raise self.value [ 956.885565] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 956.885565] env[61273]: updated_port = self._update_port( [ 956.885565] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 956.885565] env[61273]: _ensure_no_port_binding_failure(port) [ 956.885565] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 956.885565] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 956.886629] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a, please check neutron logs for more information. [ 956.886629] env[61273]: Removing descriptor: 19 [ 957.121025] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 957.146050] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 957.146419] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 957.146664] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.146927] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 957.147145] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.147362] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 957.147632] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 957.147855] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 957.148159] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] 
Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 957.148397] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 957.149596] env[61273]: DEBUG nova.virt.hardware [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 957.150929] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae640c0b-6419-4668-a385-bd1830666bb8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.158632] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c8e2f9-0ed6-422f-af9f-34e54510f9cb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.174100] env[61273]: ERROR nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a, please check neutron logs for more information. [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Traceback (most recent call last): [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] yield resources [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self.driver.spawn(context, instance, image_meta, [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] vm_ref = self.build_virtual_machine(instance, [ 957.174100] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] vif_infos = vmwarevif.get_vif_info(self._session, [ 957.174592] env[61273]: ERROR 
nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] for vif in network_info: [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] return self._sync_wrapper(fn, *args, **kwargs) [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self.wait() [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self[:] = self._gt.wait() [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] return self._exit_event.wait() [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 957.174592] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] current.throw(*self._exc) [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] result = function(*args, **kwargs) [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] return func(*args, **kwargs) [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] raise e [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] nwinfo = self.network_api.allocate_for_instance( [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] created_port_ids = self._update_ports_for_instance( [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] 
File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] with excutils.save_and_reraise_exception(): [ 957.175058] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self.force_reraise() [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] raise self.value [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] updated_port = self._update_port( [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] _ensure_no_port_binding_failure(port) [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] raise exception.PortBindingFailed(port_id=port['id']) [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] nova.exception.PortBindingFailed: Binding failed for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a, please check neutron logs for more information. [ 957.175505] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] [ 957.175505] env[61273]: INFO nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Terminating instance [ 957.177015] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-a007a7bc-b898-472b-9469-e6dd6262ea0b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.178379] env[61273]: DEBUG nova.network.neutron [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 957.260256] env[61273]: DEBUG nova.network.neutron [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.260903] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] Expecting reply to msg faa549ceb4ec46c99723c43934349de6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 957.262355] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.170s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.262921] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 957.264875] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg c19c095a65364c4498c499729ac52649 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 957.266229] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.720s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.268101] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg aa659585829c45f98cd29fc74b0c7fa1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 957.280125] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg faa549ceb4ec46c99723c43934349de6 [ 957.310085] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c19c095a65364c4498c499729ac52649 [ 957.311208] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa659585829c45f98cd29fc74b0c7fa1 [ 957.767368] env[61273]: DEBUG oslo_concurrency.lockutils [req-c0e208e0-5de9-4537-b402-668565148034 req-fb4ace98-fe03-4321-9c4b-afa15ce874eb service nova] Releasing lock "refresh_cache-a007a7bc-b898-472b-9469-e6dd6262ea0b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.767763] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock 
"refresh_cache-a007a7bc-b898-472b-9469-e6dd6262ea0b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.767956] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 957.768419] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 513961c2ae254dedbd67f9ba1347b956 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 957.770605] env[61273]: DEBUG nova.compute.utils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 957.771155] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 7eda5985220f48a19d32e359b9220dd9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 957.775531] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 513961c2ae254dedbd67f9ba1347b956 [ 957.776339] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 957.776339] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 957.780813] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7eda5985220f48a19d32e359b9220dd9 [ 957.813092] env[61273]: DEBUG nova.policy [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8969ac54b88a47028e5784f6575f2d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d2fb7de0ad453dbe6891e6974f1b66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 957.852841] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef772ca-80ab-40cb-9dc7-671e1510f4f4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.861975] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172b4898-cdc2-4df4-a28c-9562c7a8bac9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.893239] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e750b56-182f-4b00-80cc-f4d981bcf161 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.902012] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd68930-92d3-4241-b7d6-e15e55559912 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.918472] env[61273]: DEBUG nova.compute.provider_tree [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.919163] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg eb5b1685157a401796497f30fa2b7c17 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 957.926121] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb5b1685157a401796497f30fa2b7c17 [ 958.089555] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Successfully created port: 818e2199-9d00-48ea-a71b-1cb7011eae1e 
{{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 958.278613] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 958.280925] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 7e40ed38f64542a2a90db436fb8585e0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 958.293292] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.316795] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e40ed38f64542a2a90db436fb8585e0 [ 958.362154] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.362696] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 6b155b4f6deb4fb1b6bfc3f8599a52db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 958.370756] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b155b4f6deb4fb1b6bfc3f8599a52db [ 958.421616] env[61273]: DEBUG nova.scheduler.client.report [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 958.424134] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg c49737c71df3456d8044bacb82937ef2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 958.436388] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c49737c71df3456d8044bacb82937ef2 [ 958.688292] env[61273]: DEBUG nova.compute.manager [req-22e8d5db-336c-451f-af34-59d9fac06f2a req-d3873a32-11db-40d2-b6d2-b446d90c7bc3 service nova] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Received event network-vif-deleted-34ca79ff-567b-43f8-bc85-994e4b2ce95a 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 958.796154] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 4e9aa052edfa4843b731d7328aaf0b5e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 958.830813] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e9aa052edfa4843b731d7328aaf0b5e [ 958.865187] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-a007a7bc-b898-472b-9469-e6dd6262ea0b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.865605] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 958.865793] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 958.866078] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-226ec2cf-b7a6-47a5-a178-e72b87ae1d40 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.875895] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10891b6b-aa38-4292-a384-0340a61d7413 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.897938] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a007a7bc-b898-472b-9469-e6dd6262ea0b could not be found. [ 958.898181] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 958.898361] env[61273]: INFO nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 958.898602] env[61273]: DEBUG oslo.service.loopingcall [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.899464] env[61273]: ERROR nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 818e2199-9d00-48ea-a71b-1cb7011eae1e, please check neutron logs for more information. [ 958.899464] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 958.899464] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 958.899464] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 958.899464] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 958.899464] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 958.899464] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 958.899464] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 958.899464] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 958.899464] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 958.899464] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 958.899464] env[61273]: ERROR nova.compute.manager raise self.value [ 958.899464] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 958.899464] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 958.899464] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 958.899464] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 958.900184] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 958.900184] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 958.900184] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 818e2199-9d00-48ea-a71b-1cb7011eae1e, please check neutron logs for more information. 
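The traceback above ends in _ensure_no_port_binding_failure(port) raising PortBindingFailed. A minimal sketch of what such a guard plausibly looks like, assuming the Neutron port dict carries a 'binding:vif_type' field that Neutron sets to 'binding_failed' when it cannot bind the port; the names mirror the traceback, but the body is an illustration rather than the verbatim Nova source.

    # Illustrative sketch: reject ports whose binding failed on the Neutron side.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports for a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict returned by the Neutron API for a single port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

The same failure is surfaced twice by the compute manager, once per instance and once by the greenthread that ran _allocate_network_async, which is why the traceback is repeated in a second form immediately below.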
[ 958.900184] env[61273]: ERROR nova.compute.manager [ 958.900184] env[61273]: Traceback (most recent call last): [ 958.900184] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 958.900184] env[61273]: listener.cb(fileno) [ 958.900184] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 958.900184] env[61273]: result = function(*args, **kwargs) [ 958.900184] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 958.900184] env[61273]: return func(*args, **kwargs) [ 958.900184] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 958.900184] env[61273]: raise e [ 958.900184] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 958.900184] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 958.900184] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 958.900184] env[61273]: created_port_ids = self._update_ports_for_instance( [ 958.900184] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 958.900184] env[61273]: with excutils.save_and_reraise_exception(): [ 958.900184] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 958.900184] env[61273]: self.force_reraise() [ 958.900184] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 958.900184] env[61273]: raise self.value [ 958.900184] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 958.900184] env[61273]: updated_port = self._update_port( [ 958.900184] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 958.900184] env[61273]: _ensure_no_port_binding_failure(port) [ 958.900184] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 958.900184] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 958.901440] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 818e2199-9d00-48ea-a71b-1cb7011eae1e, please check neutron logs for more information. [ 958.901440] env[61273]: Removing descriptor: 19 [ 958.901440] env[61273]: DEBUG nova.compute.manager [-] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 958.901440] env[61273]: DEBUG nova.network.neutron [-] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 958.913388] env[61273]: DEBUG nova.network.neutron [-] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.913827] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 68ba648387ae4d1f885bfe25cac817c7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 958.921290] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68ba648387ae4d1f885bfe25cac817c7 [ 958.925962] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.660s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.926537] env[61273]: ERROR nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c5cb1417-be25-47df-b3cd-5ad25361a83b, please check neutron logs for more information. [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Traceback (most recent call last): [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self.driver.spawn(context, instance, image_meta, [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] vm_ref = self.build_virtual_machine(instance, [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] vif_infos = vmwarevif.get_vif_info(self._session, [ 958.926537] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] for vif in network_info: [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] return self._sync_wrapper(fn, *args, **kwargs) [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 958.927047] 
env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self.wait() [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self[:] = self._gt.wait() [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] return self._exit_event.wait() [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] current.throw(*self._exc) [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 958.927047] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] result = function(*args, **kwargs) [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] return func(*args, **kwargs) [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] raise e [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] nwinfo = self.network_api.allocate_for_instance( [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] created_port_ids = self._update_ports_for_instance( [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] with excutils.save_and_reraise_exception(): [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] self.force_reraise() [ 958.927684] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
958.928331] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] raise self.value [ 958.928331] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 958.928331] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] updated_port = self._update_port( [ 958.928331] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 958.928331] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] _ensure_no_port_binding_failure(port) [ 958.928331] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 958.928331] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] raise exception.PortBindingFailed(port_id=port['id']) [ 958.928331] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] nova.exception.PortBindingFailed: Binding failed for port c5cb1417-be25-47df-b3cd-5ad25361a83b, please check neutron logs for more information. [ 958.928331] env[61273]: ERROR nova.compute.manager [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] [ 958.928331] env[61273]: DEBUG nova.compute.utils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Binding failed for port c5cb1417-be25-47df-b3cd-5ad25361a83b, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 958.928808] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.297s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.930599] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 4ec05d17027d4589a6fba1c0187f7bbe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 958.932199] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Build of instance 4196c9a3-53d2-4a6d-b944-813be319cbdd was re-scheduled: Binding failed for port c5cb1417-be25-47df-b3cd-5ad25361a83b, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 958.932682] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 958.932916] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquiring lock "refresh_cache-4196c9a3-53d2-4a6d-b944-813be319cbdd" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.933089] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Acquired lock "refresh_cache-4196c9a3-53d2-4a6d-b944-813be319cbdd" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.934481] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 958.935370] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg ecde7389859b4ba68c7e3db015a3526c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 958.944607] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecde7389859b4ba68c7e3db015a3526c [ 958.988643] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ec05d17027d4589a6fba1c0187f7bbe [ 959.299718] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 959.324065] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 959.324332] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 959.324490] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.324671] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 959.324816] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 959.324963] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 959.325166] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 959.325327] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 959.325474] 
env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 959.325631] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 959.325798] env[61273]: DEBUG nova.virt.hardware [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 959.326627] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b416c5-e6a1-46cc-a7a2-4efcb5f721fc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.334624] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f6756f-61fa-4e6f-bd08-8c342e639cc1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.347762] env[61273]: ERROR nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 818e2199-9d00-48ea-a71b-1cb7011eae1e, please check neutron logs for more information. 
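The nova.virt.hardware lines above pick a guest CPU topology: flavor and image limits of 0 mean "no constraint", the maxima default to 65536 per dimension, and for the 1-vCPU m1.nano flavor the only candidate is 1 socket x 1 core x 1 thread. A rough sketch of that enumeration, assuming the simple rule that sockets * cores * threads must equal the vCPU count while each dimension stays within its maximum; this is a simplification of the real logic in nova/virt/hardware.py.

    from dataclasses import dataclass
    import itertools

    @dataclass(frozen=True)
    class VirtCPUTopology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate every (sockets, cores, threads) whose product equals the
        # vCPU count and which respects the per-dimension maxima.
        found = []
        for s, c, t in itertools.product(range(1, min(vcpus, max_sockets) + 1),
                                         range(1, min(vcpus, max_cores) + 1),
                                         range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                found.append(VirtCPUTopology(s, c, t))
        return found

    # For the 1-vCPU flavor in the log this yields exactly one option, matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))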
[ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Traceback (most recent call last): [ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] yield resources [ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self.driver.spawn(context, instance, image_meta, [ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] vm_ref = self.build_virtual_machine(instance, [ 959.347762] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] for vif in network_info: [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] return self._sync_wrapper(fn, *args, **kwargs) [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self.wait() [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self[:] = self._gt.wait() [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] return self._exit_event.wait() [ 959.348322] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 959.348322] env[61273]: ERROR 
nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] current.throw(*self._exc) [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] result = function(*args, **kwargs) [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] return func(*args, **kwargs) [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] raise e [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] nwinfo = self.network_api.allocate_for_instance( [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] created_port_ids = self._update_ports_for_instance( [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] with excutils.save_and_reraise_exception(): [ 959.348912] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self.force_reraise() [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] raise self.value [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] updated_port = self._update_port( [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] _ensure_no_port_binding_failure(port) [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] raise exception.PortBindingFailed(port_id=port['id']) [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] nova.exception.PortBindingFailed: Binding failed for port 818e2199-9d00-48ea-a71b-1cb7011eae1e, please check neutron logs for more information. [ 959.349569] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] [ 959.349569] env[61273]: INFO nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Terminating instance [ 959.350182] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-c24218f8-6989-4e2f-88b4-f4421b66ec3e" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.350182] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-c24218f8-6989-4e2f-88b4-f4421b66ec3e" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.350182] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 959.350554] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 8f75ff9a16f64542b1c850b50e3ed6e2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 959.357383] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f75ff9a16f64542b1c850b50e3ed6e2 [ 959.419386] env[61273]: DEBUG nova.network.neutron [-] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.419871] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e77818b8087042abb68d60bbea247c97 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 959.427773] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e77818b8087042abb68d60bbea247c97 [ 959.479664] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 959.536353] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01a60d2-eea4-40cb-8d86-7e78b021fc1f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.545954] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad6450c-1f74-47ab-b77b-37650af582c6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.575962] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.576515] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg cfbcca6d0e164bf29b3401514275c611 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 959.578549] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be06980b-80ae-4f3a-b982-e5483eefb6d3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.586089] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ec58c7-8e1e-4e4d-ade4-eb5063373cdc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.590477] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfbcca6d0e164bf29b3401514275c611 [ 959.601435] env[61273]: DEBUG nova.compute.provider_tree [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.601936] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg fef5831efbc84ce695ce2ec7d216e3ff in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 959.608254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fef5831efbc84ce695ce2ec7d216e3ff [ 959.868980] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 959.923300] env[61273]: INFO nova.compute.manager [-] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Took 1.02 seconds to deallocate network for instance. 
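The "Acquiring lock" / "acquired" / "released" DEBUG lines that bracket the cache refreshes and the resource-claim updates throughout this section come from oslo.concurrency. A minimal sketch of that usage, assuming the standard oslo_concurrency.lockutils API; the lock names are taken from the log, while the helper bodies are hypothetical placeholders.

    from oslo_concurrency import lockutils

    instance_uuid = 'a007a7bc-b898-472b-9469-e6dd6262ea0b'  # UUID taken from the log

    def refresh_instance_network_cache(uuid):
        # Hypothetical stand-in for the Neutron round-trip that rebuilds the
        # instance_info_cache ("Building network info cache for instance").
        print('refreshing network info cache for %s' % uuid)

    # Context-manager form: entering and leaving this block produces the
    # "Acquiring lock ... / Acquired lock ... / Releasing lock ..." lines.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh_instance_network_cache(instance_uuid)

    # Decorator form; the resource tracker's "compute_resources" messages come
    # from a wrapper around this same primitive.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance):
        """Release the CPU/RAM/disk claimed for a failed build (illustrative)."""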
[ 959.925671] env[61273]: DEBUG nova.compute.claims [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 959.926024] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.929781] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.930230] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 4e7eadc8059b4a6b9237f5d22f3e6e6a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 959.938275] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e7eadc8059b4a6b9237f5d22f3e6e6a [ 960.078583] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Releasing lock "refresh_cache-4196c9a3-53d2-4a6d-b944-813be319cbdd" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.078825] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 960.079012] env[61273]: DEBUG nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 960.079176] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 960.093532] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 960.094192] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 135d6f6c98a4413aacb1641a780a4aca in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 960.100654] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 135d6f6c98a4413aacb1641a780a4aca [ 960.103880] env[61273]: DEBUG nova.scheduler.client.report [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 960.106422] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 79804cf0ef404e92bb3ebae8bead04f4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 960.116853] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79804cf0ef404e92bb3ebae8bead04f4 [ 960.432831] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-c24218f8-6989-4e2f-88b4-f4421b66ec3e" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.433298] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 960.433480] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 960.433828] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0134503-b1eb-407e-9087-62c06e5e6277 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.443018] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5392500f-df54-48a7-8c9a-82110e44aec9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.465249] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c24218f8-6989-4e2f-88b4-f4421b66ec3e could not be found. [ 960.465249] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 960.465434] env[61273]: INFO nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 960.465678] env[61273]: DEBUG oslo.service.loopingcall [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.465825] env[61273]: DEBUG nova.compute.manager [-] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 960.465884] env[61273]: DEBUG nova.network.neutron [-] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 960.480294] env[61273]: DEBUG nova.network.neutron [-] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 960.480762] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0f003903ac634451b4ae918270690a18 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 960.487214] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f003903ac634451b4ae918270690a18 [ 960.525467] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 59b3103833af4aec8cc040fcb82eb74d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 960.537998] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59b3103833af4aec8cc040fcb82eb74d [ 960.596595] env[61273]: DEBUG nova.network.neutron [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.597200] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 6b8666b505904c3690ac71d723b8e913 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 960.605000] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b8666b505904c3690ac71d723b8e913 [ 960.608079] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.679s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.608669] env[61273]: ERROR nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bc0d1c65-eaa1-4791-8213-aad49456dbfb, please check neutron logs for more information. 
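Every traceback in this section passes through excutils.save_and_reraise_exception(): the context manager parks the in-flight exception, lets the cleanup inside the block run, and its __exit__ then calls force_reraise(), which is why the "self.force_reraise()" and "raise self.value" frames appear each time. A small sketch of the calling pattern, assuming the documented oslo.utils API; bind_port and cleanup_port are hypothetical stand-ins.

    from oslo_utils import excutils

    def bind_port(port):
        # Hypothetical stand-in for the Neutron port update that fails in the log.
        raise RuntimeError('binding failed for %s' % port)

    def cleanup_port(port):
        print('cleaning up %s' % port)

    def update_port_with_cleanup(port):
        try:
            bind_port(port)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs while the original exception is saved; on exit
                # force_reraise() re-raises it so the caller still sees the
                # original error (PortBindingFailed in the log, a RuntimeError here).
                cleanup_port(port)

Calling update_port_with_cleanup('p1') prints the cleanup message and then re-raises the original error to the caller, mirroring the order of frames in the tracebacks above.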
[ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Traceback (most recent call last): [ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self.driver.spawn(context, instance, image_meta, [ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self._vmops.spawn(context, instance, image_meta, injected_files, [ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] vm_ref = self.build_virtual_machine(instance, [ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] vif_infos = vmwarevif.get_vif_info(self._session, [ 960.608669] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] for vif in network_info: [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] return self._sync_wrapper(fn, *args, **kwargs) [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self.wait() [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self[:] = self._gt.wait() [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] return self._exit_event.wait() [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] current.throw(*self._exc) [ 960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
960.609130] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] result = function(*args, **kwargs) [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] return func(*args, **kwargs) [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] raise e [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] nwinfo = self.network_api.allocate_for_instance( [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] created_port_ids = self._update_ports_for_instance( [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] with excutils.save_and_reraise_exception(): [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] self.force_reraise() [ 960.609580] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 960.610023] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] raise self.value [ 960.610023] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 960.610023] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] updated_port = self._update_port( [ 960.610023] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 960.610023] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] _ensure_no_port_binding_failure(port) [ 960.610023] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 960.610023] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] raise exception.PortBindingFailed(port_id=port['id']) [ 960.610023] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] nova.exception.PortBindingFailed: Binding failed for 
port bc0d1c65-eaa1-4791-8213-aad49456dbfb, please check neutron logs for more information. [ 960.610023] env[61273]: ERROR nova.compute.manager [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] [ 960.610023] env[61273]: DEBUG nova.compute.utils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Binding failed for port bc0d1c65-eaa1-4791-8213-aad49456dbfb, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 960.610459] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.988s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.612019] env[61273]: INFO nova.compute.claims [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.613656] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg ddcbbcd25426471b91e9dafed982feaa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 960.614897] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Build of instance 57a60fa5-be59-48b8-b72a-6f7d945ba821 was re-scheduled: Binding failed for port bc0d1c65-eaa1-4791-8213-aad49456dbfb, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 960.615317] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 960.615532] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquiring lock "refresh_cache-57a60fa5-be59-48b8-b72a-6f7d945ba821" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.615677] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Acquired lock "refresh_cache-57a60fa5-be59-48b8-b72a-6f7d945ba821" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.615832] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 960.616199] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg b01d18062af64b1d97c8498f8b9a2736 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 960.627869] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b01d18062af64b1d97c8498f8b9a2736 [ 960.645125] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddcbbcd25426471b91e9dafed982feaa [ 960.711598] env[61273]: DEBUG nova.compute.manager [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Received event network-changed-818e2199-9d00-48ea-a71b-1cb7011eae1e {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 960.711820] env[61273]: DEBUG nova.compute.manager [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Refreshing instance network info cache due to event network-changed-818e2199-9d00-48ea-a71b-1cb7011eae1e. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 960.712051] env[61273]: DEBUG oslo_concurrency.lockutils [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] Acquiring lock "refresh_cache-c24218f8-6989-4e2f-88b4-f4421b66ec3e" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.712202] env[61273]: DEBUG oslo_concurrency.lockutils [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] Acquired lock "refresh_cache-c24218f8-6989-4e2f-88b4-f4421b66ec3e" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.712361] env[61273]: DEBUG nova.network.neutron [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Refreshing network info cache for port 818e2199-9d00-48ea-a71b-1cb7011eae1e {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 960.712766] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] Expecting reply to msg a0d7af3f3e72450da39da541d22db6e5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 960.719083] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0d7af3f3e72450da39da541d22db6e5 [ 960.982552] env[61273]: DEBUG nova.network.neutron [-] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.983015] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 237b36c215894e179b6f48299ddfb926 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 960.990713] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 237b36c215894e179b6f48299ddfb926 [ 961.098892] env[61273]: INFO nova.compute.manager [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] [instance: 4196c9a3-53d2-4a6d-b944-813be319cbdd] Took 1.02 seconds to deallocate network for instance. [ 961.100602] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 59ce57e365c748dca43523ff6e7e64a1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 961.118711] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 05be7363cb804aeba1ccdee9ad183d36 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 961.125959] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05be7363cb804aeba1ccdee9ad183d36 [ 961.130527] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59ce57e365c748dca43523ff6e7e64a1 [ 961.137851] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 961.197217] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.197699] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg b0d5c50f48be40e7b964de147f3c1e70 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 961.205026] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0d5c50f48be40e7b964de147f3c1e70 [ 961.226328] env[61273]: DEBUG nova.network.neutron [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 961.283418] env[61273]: DEBUG nova.network.neutron [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.283870] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] Expecting reply to msg 2833d06169d4425489c98f94560668ad in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 961.291278] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2833d06169d4425489c98f94560668ad [ 961.485452] env[61273]: INFO nova.compute.manager [-] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Took 1.02 seconds to deallocate network for instance. 
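The PortBindingFailed tracebacks in this section all bottom out in nova/network/neutron.py at _ensure_no_port_binding_failure (line 294 in the frames above), which turns a failed Neutron port binding into the exception that aborts the build. A minimal, self-contained sketch of that check, assuming the conventional condition that Neutron marks such ports with binding:vif_type set to 'binding_failed' (an assumption about the guard, not something this log states), is:

    # Illustrative sketch only -- approximates the check the tracebacks above
    # point at; the deployed Nova tree is authoritative.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                f"please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron reports an unbindable port with
        # binding:vif_type == 'binding_failed'; Nova raises on that marker.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # A port dict shaped like the failing ports in this log
    # (bc0d1c65-eaa1-4791-8213-aad49456dbfb, and later 34ca79ff-... and 0b50a699-...).
    failed_port = {'id': 'bc0d1c65-eaa1-4791-8213-aad49456dbfb',
                   'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)

Once this exception escapes _allocate_network_async, the compute manager releases the "compute_resources" claim (the abort_instance_claim lock messages) and marks the build for re-scheduling, which is the "was re-scheduled: Binding failed for port ..." sequence recorded above.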
[ 961.487686] env[61273]: DEBUG nova.compute.claims [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 961.487900] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.605114] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg c96254ad8d284d9c96fe5fe128356025 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 961.634045] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c96254ad8d284d9c96fe5fe128356025 [ 961.683355] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1a3181-a00c-4d4e-8bc4-c93b14e9b6eb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.691059] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7bae58-9ab2-4b5f-9bc6-f0931abf5c37 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.720268] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Releasing lock "refresh_cache-57a60fa5-be59-48b8-b72a-6f7d945ba821" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.720490] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 961.720665] env[61273]: DEBUG nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 961.720825] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 961.723459] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbd9a31-33d6-4fdc-be28-6ed2ca72b38c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.732534] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2ed705-abe6-4f5a-8ccf-062e5d8691ed {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.737356] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 961.737933] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 9e103b80d03445d5882eb34fc33a2bf8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 961.746156] env[61273]: DEBUG nova.compute.provider_tree [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.746892] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 70effd8a6f774a8fbfb9fc75ce3b7323 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 961.748001] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e103b80d03445d5882eb34fc33a2bf8 [ 961.754458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70effd8a6f774a8fbfb9fc75ce3b7323 [ 961.786206] env[61273]: DEBUG oslo_concurrency.lockutils [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] Releasing lock "refresh_cache-c24218f8-6989-4e2f-88b4-f4421b66ec3e" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.786705] env[61273]: DEBUG nova.compute.manager [req-13a9ad68-019a-41d0-90cc-c788a89119e3 req-46aac580-2363-4282-81c7-fdcaff7ee0b3 service nova] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Received event 
network-vif-deleted-818e2199-9d00-48ea-a71b-1cb7011eae1e {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 962.126503] env[61273]: INFO nova.scheduler.client.report [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Deleted allocations for instance 4196c9a3-53d2-4a6d-b944-813be319cbdd [ 962.133340] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Expecting reply to msg 160d925f121249c2891833c9d1f32a5d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 962.151561] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 160d925f121249c2891833c9d1f32a5d [ 962.248559] env[61273]: DEBUG nova.network.neutron [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.249287] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg e554f15748a049809fe817a01eb7b52e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 962.250791] env[61273]: DEBUG nova.scheduler.client.report [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.253215] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg f226cbf129c84ae981e7e802797d7f9c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 962.256593] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e554f15748a049809fe817a01eb7b52e [ 962.262461] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f226cbf129c84ae981e7e802797d7f9c [ 962.635400] env[61273]: DEBUG oslo_concurrency.lockutils [None req-19cb45ab-3f47-4269-9973-b7c485bfaebc tempest-DeleteServersTestJSON-485078453 tempest-DeleteServersTestJSON-485078453-project-member] Lock "4196c9a3-53d2-4a6d-b944-813be319cbdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.300s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.755601] env[61273]: INFO nova.compute.manager [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 
tempest-AttachVolumeTestJSON-1728706733-project-member] [instance: 57a60fa5-be59-48b8-b72a-6f7d945ba821] Took 1.03 seconds to deallocate network for instance. [ 962.757673] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 13e0c36625de4eb49c7cde6f6de3885f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 962.759627] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.149s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.760417] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 962.762380] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg e72985e7b1c74544971750cb8a634cf4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 962.765293] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 2.839s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.768072] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg b277ef204ebb4fdaa698792075676f3a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 962.796038] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e72985e7b1c74544971750cb8a634cf4 [ 962.809232] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13e0c36625de4eb49c7cde6f6de3885f [ 962.814650] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b277ef204ebb4fdaa698792075676f3a [ 963.269825] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg bac3fb744b6d4a1da3a773e7d4aff77a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 963.272048] env[61273]: DEBUG nova.compute.utils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 963.272631] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 
tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 335117037df744a8b5ffa368911c1884 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 963.276078] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 963.276078] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 963.287233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 335117037df744a8b5ffa368911c1884 [ 963.307419] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bac3fb744b6d4a1da3a773e7d4aff77a [ 963.340659] env[61273]: DEBUG nova.policy [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4551ac61407a4b5297d301e11c44f5fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c4985026e03c486597c6f12f54dfdd56', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 963.346108] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8feba0-270f-4bd5-be1f-4b37aa17ab3d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.353218] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d61f32-9fc0-475e-ae87-33ecb724cfd4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.382833] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db93d686-4c0f-40d3-bb5e-2bd4033971de {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.389652] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a983ab-4d62-46bc-9b10-a4eec99cd4a9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.402219] env[61273]: DEBUG nova.compute.provider_tree [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.402693] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 
tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg d3a79f0c03624e3d89d213e26096f669 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 963.410229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3a79f0c03624e3d89d213e26096f669 [ 963.777244] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 963.779002] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 7de50c3cfb8245168e3a5b67faf4d486 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 963.799178] env[61273]: INFO nova.scheduler.client.report [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Deleted allocations for instance 57a60fa5-be59-48b8-b72a-6f7d945ba821 [ 963.805307] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Successfully created port: 0b50a699-916e-4d70-a670-340d103c5ef6 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 963.807396] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Expecting reply to msg 41d7d96255fc4c1dbe9c86546513e376 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 963.822578] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7de50c3cfb8245168e3a5b67faf4d486 [ 963.829868] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41d7d96255fc4c1dbe9c86546513e376 [ 963.905585] env[61273]: DEBUG nova.scheduler.client.report [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 963.909889] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 87573ad73a794c788b1b5adb28297f14 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 963.920606] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87573ad73a794c788b1b5adb28297f14 [ 964.284553] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 
tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg c7a323285fbd42528a7dd4c2dfe3efcd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 964.309680] env[61273]: DEBUG oslo_concurrency.lockutils [None req-9fcb21cd-ab67-433a-b992-036f760e3431 tempest-AttachVolumeTestJSON-1728706733 tempest-AttachVolumeTestJSON-1728706733-project-member] Lock "57a60fa5-be59-48b8-b72a-6f7d945ba821" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.343s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.323886] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7a323285fbd42528a7dd4c2dfe3efcd [ 964.412300] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.647s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.413049] env[61273]: ERROR nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a, please check neutron logs for more information. [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Traceback (most recent call last): [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self.driver.spawn(context, instance, image_meta, [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] vm_ref = self.build_virtual_machine(instance, [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] vif_infos = vmwarevif.get_vif_info(self._session, [ 964.413049] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] for vif in network_info: [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File 
"/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] return self._sync_wrapper(fn, *args, **kwargs) [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self.wait() [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self[:] = self._gt.wait() [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] return self._exit_event.wait() [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] current.throw(*self._exc) [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 964.413444] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] result = function(*args, **kwargs) [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] return func(*args, **kwargs) [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] raise e [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] nwinfo = self.network_api.allocate_for_instance( [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] created_port_ids = self._update_ports_for_instance( [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] with excutils.save_and_reraise_exception(): [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] self.force_reraise() [ 964.413829] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 964.414216] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] raise self.value [ 964.414216] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 964.414216] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] updated_port = self._update_port( [ 964.414216] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 964.414216] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] _ensure_no_port_binding_failure(port) [ 964.414216] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 964.414216] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] raise exception.PortBindingFailed(port_id=port['id']) [ 964.414216] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] nova.exception.PortBindingFailed: Binding failed for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a, please check neutron logs for more information. [ 964.414216] env[61273]: ERROR nova.compute.manager [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] [ 964.414216] env[61273]: DEBUG nova.compute.utils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Binding failed for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 964.415002] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 2.927s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.420841] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 97f18517a63044d2a92127c749ffd0ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 964.422665] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Build of instance a007a7bc-b898-472b-9469-e6dd6262ea0b was re-scheduled: Binding failed for port 34ca79ff-567b-43f8-bc85-994e4b2ce95a, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 964.423237] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 964.423534] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-a007a7bc-b898-472b-9469-e6dd6262ea0b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.423730] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-a007a7bc-b898-472b-9469-e6dd6262ea0b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.423943] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 964.425086] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 85f3693d631e44518a5251b33732afae in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 964.444024] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85f3693d631e44518a5251b33732afae [ 964.468536] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97f18517a63044d2a92127c749ffd0ce [ 964.682235] env[61273]: DEBUG nova.compute.manager [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Received event network-changed-0b50a699-916e-4d70-a670-340d103c5ef6 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 964.682235] env[61273]: DEBUG nova.compute.manager [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Refreshing instance network info cache due to event network-changed-0b50a699-916e-4d70-a670-340d103c5ef6. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 964.682235] env[61273]: DEBUG oslo_concurrency.lockutils [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] Acquiring lock "refresh_cache-c181e857-2fcf-4658-9e90-fff396a5eaf5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.682235] env[61273]: DEBUG oslo_concurrency.lockutils [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] Acquired lock "refresh_cache-c181e857-2fcf-4658-9e90-fff396a5eaf5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.682235] env[61273]: DEBUG nova.network.neutron [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Refreshing network info cache for port 0b50a699-916e-4d70-a670-340d103c5ef6 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 964.682658] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] Expecting reply to msg 3a4d4b86f7f248aaba655f0a6dd4c520 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 964.687819] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a4d4b86f7f248aaba655f0a6dd4c520 [ 964.789770] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 964.839343] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 964.839343] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 964.839343] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.839557] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 964.839557] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.839557] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 964.839557] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 964.839557] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 964.839750] env[61273]: DEBUG 
nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 964.839750] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 964.839750] env[61273]: DEBUG nova.virt.hardware [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 964.839750] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e36837c-7814-4ac2-bd35-1bbff5133749 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.844278] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693a7945-130a-4af4-8d7d-a89d37461382 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.879077] env[61273]: ERROR nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0b50a699-916e-4d70-a670-340d103c5ef6, please check neutron logs for more information. 
[ 964.879077] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 964.879077] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 964.879077] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 964.879077] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 964.879077] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 964.879077] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 964.879077] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 964.879077] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 964.879077] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 964.879077] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 964.879077] env[61273]: ERROR nova.compute.manager raise self.value [ 964.879077] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 964.879077] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 964.879077] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 964.879077] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 964.879747] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 964.879747] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 964.879747] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0b50a699-916e-4d70-a670-340d103c5ef6, please check neutron logs for more information. 
[ 964.879747] env[61273]: ERROR nova.compute.manager [ 964.880159] env[61273]: Traceback (most recent call last): [ 964.880267] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 964.880267] env[61273]: listener.cb(fileno) [ 964.880363] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 964.880363] env[61273]: result = function(*args, **kwargs) [ 964.880443] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 964.880443] env[61273]: return func(*args, **kwargs) [ 964.880561] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 964.880561] env[61273]: raise e [ 964.880641] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 964.880641] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 964.880712] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 964.880712] env[61273]: created_port_ids = self._update_ports_for_instance( [ 964.880784] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 964.880784] env[61273]: with excutils.save_and_reraise_exception(): [ 964.880855] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 964.880855] env[61273]: self.force_reraise() [ 964.880924] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 964.880924] env[61273]: raise self.value [ 964.880993] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 964.880993] env[61273]: updated_port = self._update_port( [ 964.881115] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 964.881115] env[61273]: _ensure_no_port_binding_failure(port) [ 964.881199] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 964.881199] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 964.881282] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 0b50a699-916e-4d70-a670-340d103c5ef6, please check neutron logs for more information. [ 964.881336] env[61273]: Removing descriptor: 19 [ 964.882199] env[61273]: ERROR nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0b50a699-916e-4d70-a670-340d103c5ef6, please check neutron logs for more information. 
[ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Traceback (most recent call last): [ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] yield resources [ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self.driver.spawn(context, instance, image_meta, [ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] vm_ref = self.build_virtual_machine(instance, [ 964.882199] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] vif_infos = vmwarevif.get_vif_info(self._session, [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] for vif in network_info: [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] return self._sync_wrapper(fn, *args, **kwargs) [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self.wait() [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self[:] = self._gt.wait() [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] return self._exit_event.wait() [ 964.882614] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 964.882614] env[61273]: ERROR 
nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] result = hub.switch() [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] return self.greenlet.switch() [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] result = function(*args, **kwargs) [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] return func(*args, **kwargs) [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] raise e [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] nwinfo = self.network_api.allocate_for_instance( [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] created_port_ids = self._update_ports_for_instance( [ 964.883053] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] with excutils.save_and_reraise_exception(): [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self.force_reraise() [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] raise self.value [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] updated_port = self._update_port( [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 964.883453] 
env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] _ensure_no_port_binding_failure(port) [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] raise exception.PortBindingFailed(port_id=port['id']) [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] nova.exception.PortBindingFailed: Binding failed for port 0b50a699-916e-4d70-a670-340d103c5ef6, please check neutron logs for more information. [ 964.883453] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] [ 964.884257] env[61273]: INFO nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Terminating instance [ 964.886854] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Acquiring lock "refresh_cache-c181e857-2fcf-4658-9e90-fff396a5eaf5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.991644] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.006069] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f0f7fa-c3c4-41a4-99ee-c10d0190d5c0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.013837] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dcf8d4-8595-4512-92ed-5eea6f60a5de {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.045491] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1043ff-1221-4e74-afc1-28fea0ec7da1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.052343] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b460e151-2353-43d4-8582-633454241b69 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.065185] env[61273]: DEBUG nova.compute.provider_tree [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.065691] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 
4f64763f761c4fd684a35a25fc29fa3e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 965.076357] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f64763f761c4fd684a35a25fc29fa3e [ 965.109957] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.109957] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg f66374b478b74ac1a0e0e69d5a878468 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 965.117567] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f66374b478b74ac1a0e0e69d5a878468 [ 965.199054] env[61273]: DEBUG nova.network.neutron [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.269272] env[61273]: DEBUG nova.network.neutron [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.269881] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] Expecting reply to msg 4f7c8a7bedbb473cbd12b34b750746db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 965.279695] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f7c8a7bedbb473cbd12b34b750746db [ 965.570102] env[61273]: DEBUG nova.scheduler.client.report [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 965.573333] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg fc27de422a744088b354860053ba3eef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 965.585915] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc27de422a744088b354860053ba3eef [ 965.614126] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock 
"refresh_cache-a007a7bc-b898-472b-9469-e6dd6262ea0b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.614364] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 965.614542] env[61273]: DEBUG nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 965.614705] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 965.633409] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 965.634070] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 624c01935b4a425996d6de10088bc6dd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 965.642458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 624c01935b4a425996d6de10088bc6dd [ 965.772253] env[61273]: DEBUG oslo_concurrency.lockutils [req-b71e607e-968d-4c26-a605-fcd363ca894c req-20c96798-24b0-4ec3-9a2a-d467d13cae1d service nova] Releasing lock "refresh_cache-c181e857-2fcf-4658-9e90-fff396a5eaf5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.773259] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Acquired lock "refresh_cache-c181e857-2fcf-4658-9e90-fff396a5eaf5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.773259] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 965.773640] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg e053a8836f4d48619422634005bcb82b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 965.779739] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
e053a8836f4d48619422634005bcb82b [ 965.931136] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "518bde7d-d2b2-4b53-b30c-37a7c9d29064" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.931492] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "518bde7d-d2b2-4b53-b30c-37a7c9d29064" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.931869] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 87b9bd35ff944eef86f98e17861ece66 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 965.943733] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87b9bd35ff944eef86f98e17861ece66 [ 966.075306] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.660s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.075932] env[61273]: ERROR nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 818e2199-9d00-48ea-a71b-1cb7011eae1e, please check neutron logs for more information. 
[ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Traceback (most recent call last): [ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self.driver.spawn(context, instance, image_meta, [ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] vm_ref = self.build_virtual_machine(instance, [ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 966.075932] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] for vif in network_info: [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] return self._sync_wrapper(fn, *args, **kwargs) [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self.wait() [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self[:] = self._gt.wait() [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] return self._exit_event.wait() [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] current.throw(*self._exc) [ 966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
966.076328] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] result = function(*args, **kwargs) [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] return func(*args, **kwargs) [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] raise e [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] nwinfo = self.network_api.allocate_for_instance( [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] created_port_ids = self._update_ports_for_instance( [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] with excutils.save_and_reraise_exception(): [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] self.force_reraise() [ 966.076732] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 966.077134] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] raise self.value [ 966.077134] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 966.077134] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] updated_port = self._update_port( [ 966.077134] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 966.077134] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] _ensure_no_port_binding_failure(port) [ 966.077134] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 966.077134] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] raise exception.PortBindingFailed(port_id=port['id']) [ 966.077134] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] nova.exception.PortBindingFailed: Binding failed for 
port 818e2199-9d00-48ea-a71b-1cb7011eae1e, please check neutron logs for more information. [ 966.077134] env[61273]: ERROR nova.compute.manager [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] [ 966.077134] env[61273]: DEBUG nova.compute.utils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Binding failed for port 818e2199-9d00-48ea-a71b-1cb7011eae1e, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 966.083815] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Build of instance c24218f8-6989-4e2f-88b4-f4421b66ec3e was re-scheduled: Binding failed for port 818e2199-9d00-48ea-a71b-1cb7011eae1e, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 966.084399] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 966.084526] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-c24218f8-6989-4e2f-88b4-f4421b66ec3e" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.084673] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-c24218f8-6989-4e2f-88b4-f4421b66ec3e" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.084830] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 966.085305] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg bc743a979015406ca71661521dc54b3f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 966.102320] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc743a979015406ca71661521dc54b3f [ 966.139794] env[61273]: DEBUG nova.network.neutron [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.140347] env[61273]: INFO 
oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg a534143e187242d2be71a30fca7b6fda in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 966.152430] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a534143e187242d2be71a30fca7b6fda [ 966.293186] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 966.417150] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.417691] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 6d702b2599a04692af0543791bc93f8a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 966.427162] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d702b2599a04692af0543791bc93f8a [ 966.433706] env[61273]: DEBUG nova.compute.manager [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 966.435379] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 497ea15276344d4b943c3be3cd066bef in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 966.473221] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 497ea15276344d4b943c3be3cd066bef [ 966.603256] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 966.643164] env[61273]: INFO nova.compute.manager [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: a007a7bc-b898-472b-9469-e6dd6262ea0b] Took 1.03 seconds to deallocate network for instance. 
[ 966.645017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 295b79276ec74817a674b678021bfa61 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 966.678384] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 295b79276ec74817a674b678021bfa61 [ 966.680041] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.680592] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 3e4d8799149c40b4a5e32626efe3dac2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 966.690023] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e4d8799149c40b4a5e32626efe3dac2 [ 966.706091] env[61273]: DEBUG nova.compute.manager [req-49eb0f87-0125-4e7e-ac16-d99ebbe8d8ba req-32529306-dbc8-40dd-88ad-5de6bb2b9790 service nova] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Received event network-vif-deleted-0b50a699-916e-4d70-a670-340d103c5ef6 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 966.922621] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Releasing lock "refresh_cache-c181e857-2fcf-4658-9e90-fff396a5eaf5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.923073] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 966.923277] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 966.923588] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5ca806d-b4a9-41af-8dfc-9668de425a31 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.932474] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51704d05-13d0-44b1-a064-22074c87ad51 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.953361] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c181e857-2fcf-4658-9e90-fff396a5eaf5 could not be found. [ 966.953573] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 966.953749] env[61273]: INFO nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Took 0.03 seconds to destroy the instance on the hypervisor. [ 966.953988] env[61273]: DEBUG oslo.service.loopingcall [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.954200] env[61273]: DEBUG nova.compute.manager [-] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.954295] env[61273]: DEBUG nova.network.neutron [-] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 966.963677] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.963919] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.965336] env[61273]: INFO nova.compute.claims [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.967005] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 37fe933f1f9440ab99f9817208bcdf57 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 966.969601] env[61273]: DEBUG nova.network.neutron [-] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 966.970404] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 864186f951f645c6874d7a12ecd4a05c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 966.979061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 864186f951f645c6874d7a12ecd4a05c [ 967.004936] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37fe933f1f9440ab99f9817208bcdf57 [ 967.152032] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg b3b79864c7ee4f2c897dd6938d0c1884 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 967.182717] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-c24218f8-6989-4e2f-88b4-f4421b66ec3e" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.182896] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 967.183085] env[61273]: DEBUG nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 967.183251] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 967.185674] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3b79864c7ee4f2c897dd6938d0c1884 [ 967.198769] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 967.199366] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg deff6507f8184dadb8fd96be27921ad3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 967.205658] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg deff6507f8184dadb8fd96be27921ad3 [ 967.470396] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg e4901b12f83c4fe6a8bdbf7843326a87 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 967.473014] env[61273]: DEBUG nova.network.neutron [-] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.473375] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2bb68977edc74b95bf6cf845a22f5bbd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 967.478271] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4901b12f83c4fe6a8bdbf7843326a87 [ 967.480658] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bb68977edc74b95bf6cf845a22f5bbd [ 967.673013] env[61273]: INFO nova.scheduler.client.report [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Deleted allocations for instance a007a7bc-b898-472b-9469-e6dd6262ea0b [ 967.679226] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 33ab34dbfe2b4e9788b447da55d26c6b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 967.690031] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33ab34dbfe2b4e9788b447da55d26c6b [ 967.701845] env[61273]: DEBUG nova.network.neutron [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.702340] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg e948fcc183f646639635bcd0c395d6a0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 967.712156] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e948fcc183f646639635bcd0c395d6a0 [ 967.975689] env[61273]: INFO nova.compute.manager [-] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Took 1.02 seconds to deallocate network for instance. 
[ 967.977989] env[61273]: DEBUG nova.compute.claims [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 967.978170] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.021116] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc13c15a-b195-439e-82db-5eae8a04cd51 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.028818] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023e8ca2-924b-4b6a-9b19-79b1fc0e04b6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.058549] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b07bcdc9-ae21-477d-ace2-1bbd8ad6f244 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.065375] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e19e878-94bb-4ad9-ae24-ec3e94f83c40 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.078006] env[61273]: DEBUG nova.compute.provider_tree [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.078514] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 2d6189b64c9b4bf5bbb615af190485c2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 968.085951] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d6189b64c9b4bf5bbb615af190485c2 [ 968.180797] env[61273]: DEBUG oslo_concurrency.lockutils [None req-eb3e9d2b-3e41-4d16-9930-446e3b8c64e5 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "a007a7bc-b898-472b-9469-e6dd6262ea0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.671s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.204597] env[61273]: INFO nova.compute.manager [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: c24218f8-6989-4e2f-88b4-f4421b66ec3e] Took 1.02 seconds to deallocate network for instance. 
[ 968.206394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg cbef9057bc1f4cf584f8fab1132a098f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 968.240945] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbef9057bc1f4cf584f8fab1132a098f [ 968.581719] env[61273]: DEBUG nova.scheduler.client.report [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.584096] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 7eaa7c6c087f4e11b0f8eb95b5a8ae3a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 968.597212] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7eaa7c6c087f4e11b0f8eb95b5a8ae3a [ 968.710672] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 71ec536935ea4c0481f6e092a54d8752 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 968.741543] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71ec536935ea4c0481f6e092a54d8752 [ 969.087329] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.123s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.087959] env[61273]: DEBUG nova.compute.manager [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 969.089877] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 7063e05ec0b144238ff6ba5050ed601e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 969.091006] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 1.113s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.093242] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg dd13a08bda374a0d8ce0c99c4ec296b1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 969.129605] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7063e05ec0b144238ff6ba5050ed601e [ 969.130052] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd13a08bda374a0d8ce0c99c4ec296b1 [ 969.233873] env[61273]: INFO nova.scheduler.client.report [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Deleted allocations for instance c24218f8-6989-4e2f-88b4-f4421b66ec3e [ 969.240551] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 6278c88fdd794104b95cb50ef29a090d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 969.252872] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6278c88fdd794104b95cb50ef29a090d [ 969.595940] env[61273]: DEBUG nova.compute.utils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 969.596131] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 882fdc0b8ed544ba924e4a85afc900d4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 969.600135] env[61273]: DEBUG nova.compute.manager [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Not allocating networking since 'none' was specified. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 969.615093] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 882fdc0b8ed544ba924e4a85afc900d4 [ 969.645701] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f09d357-156e-4c8e-99a2-3810dd79a879 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.653354] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c902a9e-de18-4b5e-918c-9c9a6dcb7e9c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.683073] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6c7c1b-5cb3-424a-8afd-f1496dac605a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.689999] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97401f3-fc20-402a-9bc3-7b87213b9520 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.702858] env[61273]: DEBUG nova.compute.provider_tree [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.703653] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 89b3d6157a1843f3854267a9a0b69d29 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 969.713115] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89b3d6157a1843f3854267a9a0b69d29 [ 969.743207] env[61273]: DEBUG oslo_concurrency.lockutils [None req-be1fecdf-ad65-4d82-8eed-1490986ab7bf tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "c24218f8-6989-4e2f-88b4-f4421b66ec3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.587s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.784162] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "8e12a771-3033-419b-932e-131821d6e1fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.784407] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "8e12a771-3033-419b-932e-131821d6e1fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
969.784912] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 909f7328bb5f4fc9bcb64440f591dae7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 969.793492] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 909f7328bb5f4fc9bcb64440f591dae7 [ 970.105957] env[61273]: DEBUG nova.compute.manager [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 970.107740] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg f98af08b6cc94684adcc5c4355f87c68 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 970.137807] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f98af08b6cc94684adcc5c4355f87c68 [ 970.207165] env[61273]: DEBUG nova.scheduler.client.report [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 970.209568] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 4e710e51a2bf44e88c7c090ed5e16a5f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 970.220110] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e710e51a2bf44e88c7c090ed5e16a5f [ 970.286894] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 970.288648] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 02f4fd54eebe4e458b1cc974fc5f8846 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 970.318596] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02f4fd54eebe4e458b1cc974fc5f8846 [ 970.612499] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg c4a5ad8f09524d23aaec64bb6418bbdc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 970.646448] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4a5ad8f09524d23aaec64bb6418bbdc [ 970.712107] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.621s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.713010] env[61273]: ERROR nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0b50a699-916e-4d70-a670-340d103c5ef6, please check neutron logs for more information. 
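The traceback that follows pins this failure to _ensure_no_port_binding_failure in nova/network/neutron.py raising PortBindingFailed for port 0b50a699-916e-4d70-a670-340d103c5ef6. As a rough, self-contained sketch of that kind of check (not Nova's code; the 'binding:vif_type' == 'binding_failed' test is an assumption about how a failed Neutron binding is represented on the port):

```python
# Sketch only -- not Nova's implementation. It mirrors the shape of the check the
# traceback below attributes to _ensure_no_port_binding_failure(): inspect the
# Neutron port and raise a PortBindingFailed error when its binding failed.
# The 'binding:vif_type' == 'binding_failed' test is an assumption.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id: str) -> None:
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information."
        )
        self.port_id = port_id


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if the port dict reports a failed binding."""
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


# Example with the port id from the record above: a port whose binding failed
# raises, while a successfully bound port (e.g. vif_type 'ovs') passes silently.
try:
    ensure_no_port_binding_failure(
        {"id": "0b50a699-916e-4d70-a670-340d103c5ef6",
         "binding:vif_type": "binding_failed"}
    )
except PortBindingFailed as exc:
    print(exc)
```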
[ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Traceback (most recent call last): [ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self.driver.spawn(context, instance, image_meta, [ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] vm_ref = self.build_virtual_machine(instance, [ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] vif_infos = vmwarevif.get_vif_info(self._session, [ 970.713010] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] for vif in network_info: [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] return self._sync_wrapper(fn, *args, **kwargs) [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self.wait() [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self[:] = self._gt.wait() [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] return self._exit_event.wait() [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] result = hub.switch() [ 970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
970.713425] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] return self.greenlet.switch() [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] result = function(*args, **kwargs) [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] return func(*args, **kwargs) [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] raise e [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] nwinfo = self.network_api.allocate_for_instance( [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] created_port_ids = self._update_ports_for_instance( [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] with excutils.save_and_reraise_exception(): [ 970.713848] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] self.force_reraise() [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] raise self.value [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] updated_port = self._update_port( [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] _ensure_no_port_binding_failure(port) [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] raise exception.PortBindingFailed(port_id=port['id']) [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] nova.exception.PortBindingFailed: Binding failed for port 0b50a699-916e-4d70-a670-340d103c5ef6, please check neutron logs for more information. [ 970.714251] env[61273]: ERROR nova.compute.manager [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] [ 970.715461] env[61273]: DEBUG nova.compute.utils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Binding failed for port 0b50a699-916e-4d70-a670-340d103c5ef6, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 970.717204] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Build of instance c181e857-2fcf-4658-9e90-fff396a5eaf5 was re-scheduled: Binding failed for port 0b50a699-916e-4d70-a670-340d103c5ef6, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 970.717828] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 970.718175] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Acquiring lock "refresh_cache-c181e857-2fcf-4658-9e90-fff396a5eaf5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.718434] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Acquired lock "refresh_cache-c181e857-2fcf-4658-9e90-fff396a5eaf5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.718708] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 970.719254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg e1b3a223df5a423e870faaf773de4142 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 970.725944] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1b3a223df5a423e870faaf773de4142 [ 970.809043] env[61273]: DEBUG oslo_concurrency.lockutils [None 
req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.809292] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.810827] env[61273]: INFO nova.compute.claims [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 970.812459] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 79f16222c20d4753ac7fc1e79e06b6b4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 970.847368] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79f16222c20d4753ac7fc1e79e06b6b4 [ 971.118193] env[61273]: DEBUG nova.compute.manager [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 971.141300] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 971.141811] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 971.142123] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.142447] env[61273]: DEBUG 
nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 971.142780] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.143073] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 971.143408] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 971.143696] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 971.143984] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 971.144315] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 971.144617] env[61273]: DEBUG nova.virt.hardware [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 971.145571] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad94b98-ddd9-4616-97bd-b446e8a6dd91 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.154749] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64faa763-ff64-41db-b76d-d849921131f8 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.167471] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.173231] env[61273]: 
DEBUG nova.virt.vmwareapi.vm_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Creating folder: Project (fbfacf2c07f543ba85dd1b13ef38f78f). Parent ref: group-v103328. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 971.173637] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b380e4c6-f230-4e11-9fcb-3b714cafd7ff {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.189062] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Created folder: Project (fbfacf2c07f543ba85dd1b13ef38f78f) in parent group-v103328. [ 971.189467] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Creating folder: Instances. Parent ref: group-v103357. {{(pid=61273) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 971.189832] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7815d57-9605-4197-adfd-82aadc600d7e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.197632] env[61273]: INFO nova.virt.vmwareapi.vm_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Created folder: Instances in parent group-v103357. [ 971.197988] env[61273]: DEBUG oslo.service.loopingcall [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.198282] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 971.198585] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4224937-1790-434e-9d99-af2e93c6c20a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.213599] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.213599] env[61273]: value = "task-375347" [ 971.213599] env[61273]: _type = "Task" [ 971.213599] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.220813] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375347, 'name': CreateVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.236533] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 971.316886] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 0226bee6e57e4acea8b9bcd8e38d1dc1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 971.322928] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.323394] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 59e8d2f70db14922a1a7a430084c93e1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 971.325534] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0226bee6e57e4acea8b9bcd8e38d1dc1 [ 971.331421] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59e8d2f70db14922a1a7a430084c93e1 [ 971.458542] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "008a11da-9a85-47d8-9731-602ae35aff64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.458765] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "008a11da-9a85-47d8-9731-602ae35aff64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.459209] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 31c0867fc9754235a3fea4da59eac6d1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 971.468582] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31c0867fc9754235a3fea4da59eac6d1 [ 971.724496] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375347, 'name': CreateVM_Task, 'duration_secs': 0.240888} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.724695] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 971.725058] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.725225] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.725562] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 971.725809] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-771f5249-c33f-49de-8c2c-952990851667 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.730031] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 971.730031] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52e7c0db-f99e-30d3-31f3-010518347991" [ 971.730031] env[61273]: _type = "Task" [ 971.730031] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.736932] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52e7c0db-f99e-30d3-31f3-010518347991, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.827489] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Releasing lock "refresh_cache-c181e857-2fcf-4658-9e90-fff396a5eaf5" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.827489] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 971.827489] env[61273]: DEBUG nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 971.827489] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 971.851569] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 971.852186] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 82934556ab0342b98cc6998cbf248ab4 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 971.860385] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82934556ab0342b98cc6998cbf248ab4 [ 971.891077] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ff9cf0-eb3f-4eea-9b7d-3c01bad08e63 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.899831] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f66d16-d085-4aa3-a3f8-369f29e967f3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.930148] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f144bb-1e95-4e63-9243-3887a3e28b6e {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.937633] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f1584a-fa46-4b2e-8dc3-e205b864aced {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.950679] env[61273]: DEBUG nova.compute.provider_tree [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.951179] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 9855c110d20443509e41d548ac0df976 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 971.958307] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9855c110d20443509e41d548ac0df976 [ 971.961161] 
env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 971.962787] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 9c82bb34faff43c1a428d768493324c3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 971.996461] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c82bb34faff43c1a428d768493324c3 [ 972.239949] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52e7c0db-f99e-30d3-31f3-010518347991, 'name': SearchDatastore_Task, 'duration_secs': 0.008938} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.240435] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.240647] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.240949] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.241060] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.241233] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.241482] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a36d1cbe-3987-40d0-a258-94a3270e25e6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.248708] env[61273]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.248848] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 972.249554] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a6dc3cc-de31-434a-8806-9476871b3ee9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.254091] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 972.254091] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52f96e54-78b7-d94f-f2dc-2e416616554d" [ 972.254091] env[61273]: _type = "Task" [ 972.254091] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.261381] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52f96e54-78b7-d94f-f2dc-2e416616554d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.354276] env[61273]: DEBUG nova.network.neutron [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.354813] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg a119c440f9d642ccb61bda80bcc74b0c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 972.364390] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a119c440f9d642ccb61bda80bcc74b0c [ 972.453701] env[61273]: DEBUG nova.scheduler.client.report [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 972.456129] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 63d297c906094dc2b1db86405f3906dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 972.467197] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63d297c906094dc2b1db86405f3906dc [ 972.479134] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.764655] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52f96e54-78b7-d94f-f2dc-2e416616554d, 'name': SearchDatastore_Task, 'duration_secs': 0.007522} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.765524] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01128cb2-9440-411f-a224-a761cc372f72 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.770419] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 972.770419] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]5282f5a6-433f-5ce3-86bb-b5838a88009c" [ 972.770419] env[61273]: _type = "Task" [ 972.770419] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.777548] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5282f5a6-433f-5ce3-86bb-b5838a88009c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.857396] env[61273]: INFO nova.compute.manager [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] [instance: c181e857-2fcf-4658-9e90-fff396a5eaf5] Took 1.03 seconds to deallocate network for instance. 
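The task records around this point (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task) all follow the same submit-then-poll pattern: oslo_vmware.api waits for the vCenter task, logging "progress is 0%" until the task completes and its duration_secs is reported. A minimal, self-contained sketch of that pattern, using an assumed TaskInfo shape rather than oslo.vmware's real objects:

```python
# Sketch only -- not oslo.vmware's API. It illustrates the poll-until-done pattern
# the surrounding records follow: poll a vCenter task's state, report progress,
# and return once it succeeds (or fail if it errors). The TaskInfo shape and the
# poll interval are assumptions made for the example.
import time
from dataclasses import dataclass
from typing import Callable


@dataclass
class TaskInfo:
    task_id: str
    name: str
    state: str        # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0


def wait_for_task(poll: Callable[[], TaskInfo], interval: float = 0.5) -> TaskInfo:
    """Poll until the task reaches a terminal state, logging progress as we go."""
    start = time.monotonic()
    while True:
        info = poll()
        if info.state == "success":
            print(f"Task {info.task_id} ({info.name}) completed successfully "
                  f"in {time.monotonic() - start:.3f}s")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {info.task_id} ({info.name}) failed")
        print(f"Task {info.task_id} ({info.name}) progress is {info.progress}%")
        time.sleep(interval)


# Example: a fake poller that finishes on the second poll.
_states = iter([TaskInfo("task-375348", "CopyVirtualDisk_Task", "running", 0),
                TaskInfo("task-375348", "CopyVirtualDisk_Task", "success", 100)])
wait_for_task(lambda: next(_states), interval=0.01)
```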
[ 972.859206] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg f79eefb0fc494468b0bb448437ecdefe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 972.891665] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f79eefb0fc494468b0bb448437ecdefe [ 972.958867] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.149s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.959383] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 972.961125] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 4aa8017fdc734743841fb1dcc464c300 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 972.962201] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.483s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.963949] env[61273]: INFO nova.compute.claims [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 972.965444] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg a8ddd25a7c304ff2904c478cb3ecc0fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 972.991387] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4aa8017fdc734743841fb1dcc464c300 [ 972.998119] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8ddd25a7c304ff2904c478cb3ecc0fb [ 973.050408] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.050616] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.051226] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None 
req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 22611e2000824c6c99d094250f76c9d8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 973.063538] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22611e2000824c6c99d094250f76c9d8 [ 973.280402] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5282f5a6-433f-5ce3-86bb-b5838a88009c, 'name': SearchDatastore_Task, 'duration_secs': 0.008206} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.280803] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.281070] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] 518bde7d-d2b2-4b53-b30c-37a7c9d29064/518bde7d-d2b2-4b53-b30c-37a7c9d29064.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 973.281318] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f6d75aa1-2bdd-459d-9550-91f17465fa62 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.288609] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 973.288609] env[61273]: value = "task-375348" [ 973.288609] env[61273]: _type = "Task" [ 973.288609] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.295826] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375348, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.364789] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 25e382b10d2b4f55bf767f0628d8028b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 973.402535] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 25e382b10d2b4f55bf767f0628d8028b [ 973.468206] env[61273]: DEBUG nova.compute.utils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 973.468888] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 6abf05aee8a24915afee1f5f8d9e1f70 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 973.471114] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg c4ae290bdef94f8585419414b34bdddb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 973.473105] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 973.473286] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 973.480105] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4ae290bdef94f8585419414b34bdddb [ 973.480984] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6abf05aee8a24915afee1f5f8d9e1f70 [ 973.526544] env[61273]: DEBUG nova.policy [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af34c4e3d81c4729a9dd4a8531992ff1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9325f1def284d2a9fdced4e9eeb17f0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 973.557019] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.557453] env[61273]: DEBUG nova.compute.manager [None 
req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Starting heal instance info cache {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 973.557453] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Rebuilding the list of instances to heal {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 973.557885] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 174a489d3ca8479a8c15df98b2983eb5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 973.571661] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 174a489d3ca8479a8c15df98b2983eb5 [ 973.799478] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375348, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44936} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.799774] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore1] 518bde7d-d2b2-4b53-b30c-37a7c9d29064/518bde7d-d2b2-4b53-b30c-37a7c9d29064.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 973.800015] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 973.800309] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcdf0c53-333f-4c35-8d6d-16595a65a9c7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.806873] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 973.806873] env[61273]: value = "task-375349" [ 973.806873] env[61273]: _type = "Task" [ 973.806873] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.817006] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375349, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.835668] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Successfully created port: 569cb115-6d92-4aca-968c-f05be2b180c7 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 973.904197] env[61273]: INFO nova.scheduler.client.report [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Deleted allocations for instance c181e857-2fcf-4658-9e90-fff396a5eaf5 [ 973.910240] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Expecting reply to msg 9686219aba684524a2806cef0631f0b5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 973.922427] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9686219aba684524a2806cef0631f0b5 [ 973.974018] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 973.976204] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 08ca9b01451847f5bbb5bd04a2e8922e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 974.011648] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08ca9b01451847f5bbb5bd04a2e8922e [ 974.033886] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ab7915-e35e-432c-ac69-9eb9292008e5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.041806] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c74893-b2ed-417a-88a1-95aa01b0d36f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.072364] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 974.072532] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Skipping network cache update for instance because it is Building. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 974.072664] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Skipping network cache update for instance because it is Building. 
{{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 974.072794] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Didn't find any instances for network info cache update. {{(pid=61273) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10012}} [ 974.073199] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.073935] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b0edea-a60e-489e-9e36-0cd26d293a14 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.076326] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.076500] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.077056] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.077277] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.077397] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.077529] env[61273]: DEBUG nova.compute.manager [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61273) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 974.077670] env[61273]: DEBUG oslo_service.periodic_task [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Running periodic task ComputeManager.update_available_resource {{(pid=61273) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.078018] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 9bdaf6e91930482f87fca661c5766894 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 974.081820] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64929df9-9529-4865-9a96-284baaadb1f9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.086647] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bdaf6e91930482f87fca661c5766894 [ 974.095667] env[61273]: DEBUG nova.compute.provider_tree [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.096148] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 95ad6a766fdf4aa3ba9c14a6a7ea7bca in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 974.104355] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95ad6a766fdf4aa3ba9c14a6a7ea7bca [ 974.315924] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375349, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058092} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.316244] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 974.316974] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c91bf5e-0ee0-4191-882b-e7e12f7e144d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.336500] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 518bde7d-d2b2-4b53-b30c-37a7c9d29064/518bde7d-d2b2-4b53-b30c-37a7c9d29064.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.337057] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4074818a-76c4-49dd-ba14-0084ca1f8db7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.356712] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 974.356712] env[61273]: value = "task-375350" [ 974.356712] env[61273]: _type = "Task" [ 974.356712] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.364737] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375350, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.412458] env[61273]: DEBUG oslo_concurrency.lockutils [None req-7f581d9d-69d7-4504-833a-429b5824e0b6 tempest-ServersTestFqdnHostnames-142808166 tempest-ServersTestFqdnHostnames-142808166-project-member] Lock "c181e857-2fcf-4658-9e90-fff396a5eaf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.819s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.452982] env[61273]: DEBUG nova.compute.manager [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Received event network-changed-569cb115-6d92-4aca-968c-f05be2b180c7 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 974.452982] env[61273]: DEBUG nova.compute.manager [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Refreshing instance network info cache due to event network-changed-569cb115-6d92-4aca-968c-f05be2b180c7. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 974.452982] env[61273]: DEBUG oslo_concurrency.lockutils [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] Acquiring lock "refresh_cache-8e12a771-3033-419b-932e-131821d6e1fe" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.452982] env[61273]: DEBUG oslo_concurrency.lockutils [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] Acquired lock "refresh_cache-8e12a771-3033-419b-932e-131821d6e1fe" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.453176] env[61273]: DEBUG nova.network.neutron [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Refreshing network info cache for port 569cb115-6d92-4aca-968c-f05be2b180c7 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 974.453837] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] Expecting reply to msg 810f7ddae7ec4ce9b0b2f1b04cbd21f0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 974.460047] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 810f7ddae7ec4ce9b0b2f1b04cbd21f0 [ 974.483074] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 1e062f6a4a604edb9a16ac24815088b8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 974.512652] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e062f6a4a604edb9a16ac24815088b8 [ 974.582513] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.598945] env[61273]: DEBUG nova.scheduler.client.report [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 974.601458] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg f820beb9d1a7446f9b510559ca94a7c5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 974.614071] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f820beb9d1a7446f9b510559ca94a7c5 [ 974.702736] env[61273]: ERROR nova.compute.manager [None 
req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 569cb115-6d92-4aca-968c-f05be2b180c7, please check neutron logs for more information. [ 974.702736] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 974.702736] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 974.702736] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 974.702736] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 974.702736] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 974.702736] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 974.702736] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 974.702736] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 974.702736] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 974.702736] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 974.702736] env[61273]: ERROR nova.compute.manager raise self.value [ 974.702736] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 974.702736] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 974.702736] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 974.702736] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 974.703504] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 974.703504] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 974.703504] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 569cb115-6d92-4aca-968c-f05be2b180c7, please check neutron logs for more information. 
[ 974.703504] env[61273]: ERROR nova.compute.manager [ 974.703504] env[61273]: Traceback (most recent call last): [ 974.703504] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 974.703504] env[61273]: listener.cb(fileno) [ 974.703504] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 974.703504] env[61273]: result = function(*args, **kwargs) [ 974.703504] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 974.703504] env[61273]: return func(*args, **kwargs) [ 974.703504] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 974.703504] env[61273]: raise e [ 974.703504] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 974.703504] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 974.703504] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 974.703504] env[61273]: created_port_ids = self._update_ports_for_instance( [ 974.703504] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 974.703504] env[61273]: with excutils.save_and_reraise_exception(): [ 974.703504] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 974.703504] env[61273]: self.force_reraise() [ 974.703504] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 974.703504] env[61273]: raise self.value [ 974.703504] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 974.703504] env[61273]: updated_port = self._update_port( [ 974.703504] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 974.703504] env[61273]: _ensure_no_port_binding_failure(port) [ 974.703504] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 974.703504] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 974.704526] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 569cb115-6d92-4aca-968c-f05be2b180c7, please check neutron logs for more information. [ 974.704526] env[61273]: Removing descriptor: 19 [ 974.866884] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375350, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.972862] env[61273]: DEBUG nova.network.neutron [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 974.986164] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 975.011946] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 975.012263] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 975.012451] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 975.012655] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 975.012894] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 975.013082] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 975.013313] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 975.013500] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 975.013693] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] 
Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 975.013869] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 975.014071] env[61273]: DEBUG nova.virt.hardware [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 975.014945] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c621545e-9a42-4618-b259-3ff640baa502 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.023734] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c734c91-7a61-42bd-ab39-baeda0ee7c94 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.039797] env[61273]: ERROR nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 569cb115-6d92-4aca-968c-f05be2b180c7, please check neutron logs for more information. [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Traceback (most recent call last): [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] yield resources [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self.driver.spawn(context, instance, image_meta, [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] vm_ref = self.build_virtual_machine(instance, [ 975.039797] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 975.040225] env[61273]: ERROR 
nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] for vif in network_info: [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] return self._sync_wrapper(fn, *args, **kwargs) [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self.wait() [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self[:] = self._gt.wait() [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] return self._exit_event.wait() [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 975.040225] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] current.throw(*self._exc) [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] result = function(*args, **kwargs) [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] return func(*args, **kwargs) [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] raise e [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] nwinfo = self.network_api.allocate_for_instance( [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] created_port_ids = self._update_ports_for_instance( [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] 
File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] with excutils.save_and_reraise_exception(): [ 975.040685] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self.force_reraise() [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] raise self.value [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] updated_port = self._update_port( [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] _ensure_no_port_binding_failure(port) [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] raise exception.PortBindingFailed(port_id=port['id']) [ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] nova.exception.PortBindingFailed: Binding failed for port 569cb115-6d92-4aca-968c-f05be2b180c7, please check neutron logs for more information. 
[ 975.041170] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] [ 975.041170] env[61273]: INFO nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Terminating instance [ 975.043397] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-8e12a771-3033-419b-932e-131821d6e1fe" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.107214] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.145s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.108047] env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 975.110939] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg ae7dc4f708b64a5a8bb79375499fd2fe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 975.112402] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.530s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.112717] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.113009] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61273) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 975.114655] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34d70fb-e2f8-40e6-aba8-24500ef1b36a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.126308] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27eaa2d-105e-4199-af1b-9b96bffe6b08 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.148657] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
ae7dc4f708b64a5a8bb79375499fd2fe [ 975.149991] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c34a836-bd4d-4c6e-afb8-8611095ceafd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.153294] env[61273]: DEBUG nova.network.neutron [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.153770] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] Expecting reply to msg a3d2d78c41f94c308fde4fbb41a5d8dc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 975.160133] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef36d17e-0d53-4017-b62f-fb17dbeccd08 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.164690] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3d2d78c41f94c308fde4fbb41a5d8dc [ 975.190946] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181719MB free_disk=141GB free_vcpus=48 pci_devices=None {{(pid=61273) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 975.191099] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.191293] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.192175] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 75036fb0db134f8d8f6877145e2909db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 975.210440] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75036fb0db134f8d8f6877145e2909db [ 975.368611] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375350, 'name': ReconfigVM_Task, 'duration_secs': 0.774073} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.368955] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 518bde7d-d2b2-4b53-b30c-37a7c9d29064/518bde7d-d2b2-4b53-b30c-37a7c9d29064.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.369542] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c930dc59-f66d-46a5-be82-cd7e07b9b652 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.375156] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 975.375156] env[61273]: value = "task-375351" [ 975.375156] env[61273]: _type = "Task" [ 975.375156] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.384079] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375351, 'name': Rename_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.615420] env[61273]: DEBUG nova.compute.utils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 975.616189] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg fd4c22e9a89b4d98b15e3c9eabdbdf7d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 975.617247] env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 975.617477] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 975.627100] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd4c22e9a89b4d98b15e3c9eabdbdf7d [ 975.656135] env[61273]: DEBUG oslo_concurrency.lockutils [req-2fdcbc58-a6a1-4e7d-b016-688038a60ba1 req-7f60d874-3bf3-49c0-96cf-592835a82700 service nova] Releasing lock "refresh_cache-8e12a771-3033-419b-932e-131821d6e1fe" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.656600] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-8e12a771-3033-419b-932e-131821d6e1fe" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.656829] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 975.657273] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg edb1a811a8fc4f48838fbefc0acc5f54 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 975.663887] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edb1a811a8fc4f48838fbefc0acc5f54 [ 975.686569] env[61273]: DEBUG nova.policy [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8969ac54b88a47028e5784f6575f2d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d2fb7de0ad453dbe6891e6974f1b66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 975.695233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 1976f7fe5a5f40d5ae233837ca1a488f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 975.705431] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1976f7fe5a5f40d5ae233837ca1a488f [ 975.884863] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375351, 'name': Rename_Task, 'duration_secs': 0.123888} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.885250] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 975.885550] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16b0083c-490c-49e6-b1eb-88911412d256 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.892240] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 975.892240] env[61273]: value = "task-375352" [ 975.892240] env[61273]: _type = "Task" [ 975.892240] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.900834] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375352, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.965580] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Successfully created port: fafa6f04-80b2-4750-8cda-117f48969222 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 976.121066] env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 976.122945] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg d151a3789b8549e58d93ace754462f77 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 976.167569] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d151a3789b8549e58d93ace754462f77 [ 976.183942] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 976.220873] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 518bde7d-d2b2-4b53-b30c-37a7c9d29064 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.221162] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 8e12a771-3033-419b-932e-131821d6e1fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.221396] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Instance 008a11da-9a85-47d8-9731-602ae35aff64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61273) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 976.221739] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 976.222078] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61273) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 976.284456] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca72e332-a4a5-4f52-9884-839760a23fcb {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.292081] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdb5510-d162-40c9-89f7-fa2632e74b3f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.296221] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.296723] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg e8febc1da739496d9aa57630ec2d35b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 976.331227] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8febc1da739496d9aa57630ec2d35b0 [ 976.332200] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cebfcd3-9234-47d5-97bf-06a18a2fc67c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.341132] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e018f36d-c253-4efa-85a7-76640021f311 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.355755] env[61273]: DEBUG nova.compute.provider_tree [None 
req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.356332] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg bbd32bf7d8504ca4a83d14f38d6b736c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 976.365397] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbd32bf7d8504ca4a83d14f38d6b736c [ 976.400963] env[61273]: DEBUG oslo_vmware.api [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375352, 'name': PowerOnVM_Task, 'duration_secs': 0.414868} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.401246] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 976.401405] env[61273]: INFO nova.compute.manager [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Took 5.29 seconds to spawn the instance on the hypervisor. [ 976.401577] env[61273]: DEBUG nova.compute.manager [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 976.402387] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d96e79-21e1-4c11-9072-96a3e0af1e7f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.409652] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg f32d03ad3bd74003bb034d46d7ea65d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 976.444254] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f32d03ad3bd74003bb034d46d7ea65d2 [ 976.489996] env[61273]: DEBUG nova.compute.manager [req-8e65b4c8-f55c-48c8-b892-0408c812202d req-9a4e84e4-a864-4b45-b257-572383563f52 service nova] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Received event network-vif-deleted-569cb115-6d92-4aca-968c-f05be2b180c7 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 976.627717] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg b875c2c8741e4006bb6efcbee54a0f82 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 976.659122] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b875c2c8741e4006bb6efcbee54a0f82 [ 976.801804] env[61273]: DEBUG oslo_concurrency.lockutils [None 
req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-8e12a771-3033-419b-932e-131821d6e1fe" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.801804] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 976.801804] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 976.801804] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40f6be6e-42a8-4384-aeca-fa9dd80e7d2b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.807963] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c903f4a-efa5-4b38-a9ff-cb75de5ec2b2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.830180] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8e12a771-3033-419b-932e-131821d6e1fe could not be found. [ 976.830596] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 976.830920] env[61273]: INFO nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Took 0.03 seconds to destroy the instance on the hypervisor. [ 976.831287] env[61273]: DEBUG oslo.service.loopingcall [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 976.831641] env[61273]: DEBUG nova.compute.manager [-] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 976.831888] env[61273]: DEBUG nova.network.neutron [-] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 976.849450] env[61273]: DEBUG nova.network.neutron [-] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 976.850024] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fb37ad89700c4887844c5d2c751a7596 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 976.859193] env[61273]: DEBUG nova.scheduler.client.report [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.861596] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Expecting reply to msg 832a161bcf424d27837c3ae636fc548e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 976.872169] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb37ad89700c4887844c5d2c751a7596 [ 976.880348] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 832a161bcf424d27837c3ae636fc548e [ 976.923254] env[61273]: INFO nova.compute.manager [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Took 9.98 seconds to build instance. [ 976.923646] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg c492df480a284b968d2db2732eb9c3d7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 976.938272] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c492df480a284b968d2db2732eb9c3d7 [ 976.954165] env[61273]: ERROR nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fafa6f04-80b2-4750-8cda-117f48969222, please check neutron logs for more information. 
[ 976.954165] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 976.954165] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 976.954165] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 976.954165] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 976.954165] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 976.954165] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 976.954165] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 976.954165] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 976.954165] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 976.954165] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 976.954165] env[61273]: ERROR nova.compute.manager raise self.value [ 976.954165] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 976.954165] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 976.954165] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 976.954165] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 976.954764] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 976.954764] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 976.954764] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fafa6f04-80b2-4750-8cda-117f48969222, please check neutron logs for more information. 
[ 976.954764] env[61273]: ERROR nova.compute.manager [ 976.954764] env[61273]: Traceback (most recent call last): [ 976.954764] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 976.954764] env[61273]: listener.cb(fileno) [ 976.954764] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 976.954764] env[61273]: result = function(*args, **kwargs) [ 976.954764] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 976.954764] env[61273]: return func(*args, **kwargs) [ 976.954764] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 976.954764] env[61273]: raise e [ 976.954764] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 976.954764] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 976.954764] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 976.954764] env[61273]: created_port_ids = self._update_ports_for_instance( [ 976.954764] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 976.954764] env[61273]: with excutils.save_and_reraise_exception(): [ 976.954764] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 976.954764] env[61273]: self.force_reraise() [ 976.954764] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 976.954764] env[61273]: raise self.value [ 976.954764] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 976.954764] env[61273]: updated_port = self._update_port( [ 976.954764] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 976.954764] env[61273]: _ensure_no_port_binding_failure(port) [ 976.954764] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 976.954764] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 976.955754] env[61273]: nova.exception.PortBindingFailed: Binding failed for port fafa6f04-80b2-4750-8cda-117f48969222, please check neutron logs for more information. [ 976.955754] env[61273]: Removing descriptor: 19 [ 977.130722] env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 977.156877] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 977.157017] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 977.157094] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.157335] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 977.157438] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.157569] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 977.157780] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 977.157990] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 977.158166] 
env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 977.158329] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 977.158498] env[61273]: DEBUG nova.virt.hardware [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 977.159338] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da77a122-0af3-4163-8f2d-216868d9468b {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.167002] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eddcc01-59a6-4473-99be-87b32e65d2e1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.180803] env[61273]: ERROR nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fafa6f04-80b2-4750-8cda-117f48969222, please check neutron logs for more information. 
[ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Traceback (most recent call last): [ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] yield resources [ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self.driver.spawn(context, instance, image_meta, [ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self._vmops.spawn(context, instance, image_meta, injected_files, [ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] vm_ref = self.build_virtual_machine(instance, [ 977.180803] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] vif_infos = vmwarevif.get_vif_info(self._session, [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] for vif in network_info: [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] return self._sync_wrapper(fn, *args, **kwargs) [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self.wait() [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self[:] = self._gt.wait() [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] return self._exit_event.wait() [ 977.181245] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 977.181245] env[61273]: ERROR 
nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] current.throw(*self._exc) [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] result = function(*args, **kwargs) [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] return func(*args, **kwargs) [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] raise e [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] nwinfo = self.network_api.allocate_for_instance( [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] created_port_ids = self._update_ports_for_instance( [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] with excutils.save_and_reraise_exception(): [ 977.181774] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self.force_reraise() [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] raise self.value [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] updated_port = self._update_port( [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] _ensure_no_port_binding_failure(port) [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] raise exception.PortBindingFailed(port_id=port['id']) [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] nova.exception.PortBindingFailed: Binding failed for port fafa6f04-80b2-4750-8cda-117f48969222, please check neutron logs for more information. [ 977.182312] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] [ 977.182312] env[61273]: INFO nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Terminating instance [ 977.183032] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-008a11da-9a85-47d8-9731-602ae35aff64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.183201] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-008a11da-9a85-47d8-9731-602ae35aff64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.183367] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 977.183774] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg e23df60d33d646d190fd11d443fb81ee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 977.192310] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e23df60d33d646d190fd11d443fb81ee [ 977.351667] env[61273]: DEBUG nova.network.neutron [-] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.352233] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 98eb9a2bc95244068a937c2e96b20ade in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 977.361922] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98eb9a2bc95244068a937c2e96b20ade [ 977.373426] env[61273]: DEBUG nova.compute.resource_tracker [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61273) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 977.373613] env[61273]: DEBUG oslo_concurrency.lockutils [None req-ccc9d7fc-286a-43ea-b8a7-c68f733844ae None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.182s {{(pid=61273) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.407722] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg afb2d8947687433cb303a15a78dff547 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 977.416802] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afb2d8947687433cb303a15a78dff547 [ 977.425770] env[61273]: DEBUG oslo_concurrency.lockutils [None req-84849d30-f59c-4b80-842e-01037ba3cfd2 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "518bde7d-d2b2-4b53-b30c-37a7c9d29064" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 11.494s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.703450] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 977.782077] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.782077] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg b45e85ad76fb4e01be5d78725725714a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 977.789311] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b45e85ad76fb4e01be5d78725725714a [ 977.856880] env[61273]: INFO nova.compute.manager [-] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Took 1.02 seconds to deallocate network for instance. 
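
The two PortBindingFailed tracebacks above (ports 569cb115-6d92-4aca-968c-f05be2b180c7 and fafa6f04-80b2-4750-8cda-117f48969222) both terminate in `_ensure_no_port_binding_failure` at nova/network/neutron.py:294. Below is a minimal, self-contained sketch of what such a check amounts to; the `binding:vif_type` field and the `binding_failed` sentinel come from Neutron's port-binding extension and are assumptions here, not details read from this log.

```python
# Minimal sketch of the check behind the PortBindingFailed tracebacks above.
# The 'binding:vif_type' field and 'binding_failed' value are assumptions
# based on Neutron's port-binding extension, not taken from this log.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reported the port's VIF binding as failed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# The port Neutron handed back for instance 008a11da in the run above.
failed_port = {'id': 'fafa6f04-80b2-4750-8cda-117f48969222',
               'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(failed_port)
except PortBindingFailed as exc:
    print(exc)  # mirrors the ERROR line emitted by the compute manager
```

In this run Neutron accepted the port update but the binding itself failed, so the compute manager aborts the spawn, tears the instance back down, and later re-schedules the build ("Build of instance ... was re-scheduled: Binding failed for port ..." further down in the log).
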
[ 977.857696] env[61273]: DEBUG nova.compute.claims [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 977.858170] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.858538] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.860678] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 8c7bc54ea5244bcb8c00f73c1a98a7de in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 977.892600] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c7bc54ea5244bcb8c00f73c1a98a7de [ 977.910185] env[61273]: INFO nova.compute.manager [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Rebuilding instance [ 977.949211] env[61273]: DEBUG nova.compute.manager [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 977.950063] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bae53cc-fba1-45c2-a625-35a86748d092 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.957628] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 29c4d48e6bfb4bc0bb080b89c774df5c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 977.990992] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29c4d48e6bfb4bc0bb080b89c774df5c [ 978.283552] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-008a11da-9a85-47d8-9731-602ae35aff64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.283972] env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 978.284474] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 978.285203] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1eb48abe-4925-4446-a98a-580e2dc44bc1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.294355] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70a5884-da6d-4d28-b547-da28c9c3c193 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.315722] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 008a11da-9a85-47d8-9731-602ae35aff64 could not be found. [ 978.315932] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 978.316163] env[61273]: INFO nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Took 0.03 seconds to destroy the instance on the hypervisor. [ 978.316420] env[61273]: DEBUG oslo.service.loopingcall [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.316641] env[61273]: DEBUG nova.compute.manager [-] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 978.316735] env[61273]: DEBUG nova.network.neutron [-] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 978.332426] env[61273]: DEBUG nova.network.neutron [-] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 978.332938] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2efdaef673e64ff186fc58ce7ee86aaa in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 978.339806] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2efdaef673e64ff186fc58ce7ee86aaa [ 978.408387] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7017f0e7-6552-423a-8801-7ef92d522d88 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.415642] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd42ba2-fdf4-44f9-a2b5-3d530bd240e2 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.445328] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ffa97b-e4a2-4ab4-b5eb-a2013ffc4c22 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.452115] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8021159-294d-4c85-beb9-a8541da39478 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.466599] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 978.467048] env[61273]: DEBUG nova.compute.provider_tree [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.467491] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 31fd8726e83a499a8454cb81062fdd42 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 978.469205] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-364700a1-8da9-4aa4-b8d9-7bcf39acc97d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.475731] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 978.475731] env[61273]: value = "task-375353" [ 978.475731] env[61273]: _type = "Task" [ 978.475731] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.476219] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31fd8726e83a499a8454cb81062fdd42 [ 978.484355] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.542731] env[61273]: DEBUG nova.compute.manager [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Received event network-changed-fafa6f04-80b2-4750-8cda-117f48969222 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 978.542927] env[61273]: DEBUG nova.compute.manager [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Refreshing instance network info cache due to event network-changed-fafa6f04-80b2-4750-8cda-117f48969222. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 978.543141] env[61273]: DEBUG oslo_concurrency.lockutils [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] Acquiring lock "refresh_cache-008a11da-9a85-47d8-9731-602ae35aff64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.543283] env[61273]: DEBUG oslo_concurrency.lockutils [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] Acquired lock "refresh_cache-008a11da-9a85-47d8-9731-602ae35aff64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.543440] env[61273]: DEBUG nova.network.neutron [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Refreshing network info cache for port fafa6f04-80b2-4750-8cda-117f48969222 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 978.543868] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] Expecting reply to msg 6f1feab12a04424ab3eb164cea22edd8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 978.552904] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f1feab12a04424ab3eb164cea22edd8 [ 978.834685] env[61273]: DEBUG nova.network.neutron [-] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.835143] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d7bf911a922044eb937d97a569981a63 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 978.843396] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7bf911a922044eb937d97a569981a63 [ 978.972249] env[61273]: DEBUG nova.scheduler.client.report [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 
4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 978.974861] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 218b06e46aa14e9d8f9417d86f53fb64 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 978.986336] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375353, 'name': PowerOffVM_Task, 'duration_secs': 0.114952} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.986621] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 978.986900] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 978.987666] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea875018-72a9-466b-8db7-d689550885d0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.990522] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 218b06e46aa14e9d8f9417d86f53fb64 [ 978.995154] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 978.995383] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-858666db-7302-4866-87a5-268784f17e50 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.022825] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 979.023168] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Deleting contents of the VM from datastore datastore1 
{{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 979.023376] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Deleting the datastore file [datastore1] 518bde7d-d2b2-4b53-b30c-37a7c9d29064 {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 979.023629] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-734d6136-9ab5-4ba4-bc85-1b9d8aa5dda0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.029470] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 979.029470] env[61273]: value = "task-375355" [ 979.029470] env[61273]: _type = "Task" [ 979.029470] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.036650] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.060560] env[61273]: DEBUG nova.network.neutron [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 979.123347] env[61273]: DEBUG nova.network.neutron [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.123744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] Expecting reply to msg 63f002f6a31e4f459fe7216ea7958783 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 979.131947] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63f002f6a31e4f459fe7216ea7958783 [ 979.337439] env[61273]: INFO nova.compute.manager [-] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Took 1.02 seconds to deallocate network for instance. 
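
The PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow the usual vSphere pattern: the API call returns a task reference immediately, and oslo.vmware's wait_for_task/_poll_task loop re-reads its state ("progress is 0%" ... "completed successfully") until it finishes or errors out. Here is a rough, self-contained sketch of that poll-until-done loop, with a fake task backend standing in for the vCenter calls; the names and structures below are illustrative, not oslo.vmware's actual internals.

```python
# Self-contained sketch of the poll-until-complete pattern behind the
# PowerOffVM_Task / DeleteDatastoreFile_Task lines above. It only loosely
# mirrors oslo_vmware.api's wait_for_task/_poll_task; names and the fake
# backend are illustrative.
import itertools
import time


class TaskError(Exception):
    pass


def wait_for_task(read_task_info, poll_interval=0.5, max_polls=120):
    """Poll read_task_info() until the task reaches 'success' or 'error',
    mimicking the 'progress is N%' ... 'completed successfully' sequence."""
    for _ in range(max_polls):
        info = read_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskError(info.get('error', 'task failed'))
        print(f"Task {info['id']} ({info['name']}) progress is "
              f"{info.get('progress', 0)}%.")
        time.sleep(poll_interval)
    raise TaskError('timed out waiting for task')


# Fake backend: reports 'running' twice, then 'success', roughly like
# task-375355 (DeleteDatastoreFile_Task) in the log above.
states = itertools.chain(
    [{'id': 'task-375355', 'name': 'DeleteDatastoreFile_Task',
      'state': 'running', 'progress': 0}] * 2,
    [{'id': 'task-375355', 'name': 'DeleteDatastoreFile_Task',
      'state': 'success', 'duration_secs': 0.76}])
print(wait_for_task(lambda: next(states), poll_interval=0.01))
```
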
[ 979.339827] env[61273]: DEBUG nova.compute.claims [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 979.340050] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.477828] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.619s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.478479] env[61273]: ERROR nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 569cb115-6d92-4aca-968c-f05be2b180c7, please check neutron logs for more information. [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Traceback (most recent call last): [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self.driver.spawn(context, instance, image_meta, [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] vm_ref = self.build_virtual_machine(instance, [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 979.478479] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] for vif in network_info: [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 
979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] return self._sync_wrapper(fn, *args, **kwargs) [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self.wait() [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self[:] = self._gt.wait() [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] return self._exit_event.wait() [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] current.throw(*self._exc) [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 979.479079] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] result = function(*args, **kwargs) [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] return func(*args, **kwargs) [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] raise e [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] nwinfo = self.network_api.allocate_for_instance( [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] created_port_ids = self._update_ports_for_instance( [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] with excutils.save_and_reraise_exception(): [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] self.force_reraise() [ 979.479716] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 979.480468] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] raise self.value [ 979.480468] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 979.480468] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] updated_port = self._update_port( [ 979.480468] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 979.480468] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] _ensure_no_port_binding_failure(port) [ 979.480468] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 979.480468] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] raise exception.PortBindingFailed(port_id=port['id']) [ 979.480468] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] nova.exception.PortBindingFailed: Binding failed for port 569cb115-6d92-4aca-968c-f05be2b180c7, please check neutron logs for more information. [ 979.480468] env[61273]: ERROR nova.compute.manager [instance: 8e12a771-3033-419b-932e-131821d6e1fe] [ 979.480468] env[61273]: DEBUG nova.compute.utils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Binding failed for port 569cb115-6d92-4aca-968c-f05be2b180c7, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 979.480944] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.140s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.482333] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 35fffbece1f94a3aa03f321a824a7e71 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 979.483475] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Build of instance 8e12a771-3033-419b-932e-131821d6e1fe was re-scheduled: Binding failed for port 569cb115-6d92-4aca-968c-f05be2b180c7, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 979.483885] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 979.484120] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-8e12a771-3033-419b-932e-131821d6e1fe" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.484266] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-8e12a771-3033-419b-932e-131821d6e1fe" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.484424] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 979.484780] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg bfa17add311d42468440b65a2966b711 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 979.491372] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfa17add311d42468440b65a2966b711 [ 979.512023] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35fffbece1f94a3aa03f321a824a7e71 [ 979.538824] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.626150] env[61273]: DEBUG oslo_concurrency.lockutils [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] Releasing lock "refresh_cache-008a11da-9a85-47d8-9731-602ae35aff64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.626410] env[61273]: DEBUG nova.compute.manager [req-86fb2712-8b45-4c25-878e-8eeb98bed960 req-fdb2105f-cf6b-4ad3-b75d-49b290f58cef service nova] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Received event network-vif-deleted-fafa6f04-80b2-4750-8cda-117f48969222 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 980.005585] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 980.042349] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.764295} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.042662] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 980.042914] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Deleted contents of the VM from datastore datastore1 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 980.043127] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 980.044745] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg bf49669b7cb54362be5701991ae0d182 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 980.046782] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f5ec10-2964-45bf-90dd-1fc5dfcd5cdd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.053335] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab5237c-bc26-421c-a5ff-30490ced3704 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.083255] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.083749] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg e7f0a081b09049ce93895039074ab1ec in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 980.084904] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf49669b7cb54362be5701991ae0d182 [ 980.085728] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b284ace-2213-4772-ba09-1a7f1c374b9c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.092344] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
e7f0a081b09049ce93895039074ab1ec [ 980.093662] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9164dc65-a5e2-4622-8897-d97f22cfaf3d {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.106817] env[61273]: DEBUG nova.compute.provider_tree [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.107238] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 91920c9362de45839aced5a05f13dd65 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 980.113614] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91920c9362de45839aced5a05f13dd65 [ 980.552119] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 4e6b9ccb0ca2492289c53393c1a1be43 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 980.582184] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e6b9ccb0ca2492289c53393c1a1be43 [ 980.589508] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-8e12a771-3033-419b-932e-131821d6e1fe" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.589749] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 980.589906] env[61273]: DEBUG nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 980.590070] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 980.605813] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 980.606534] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 7ad9bf38834f4013b70d1e3fe414ebcf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 980.609971] env[61273]: DEBUG nova.scheduler.client.report [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 980.613546] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 3ee35df39ebc47cd99130b5fa04191a8 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 980.614996] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ad9bf38834f4013b70d1e3fe414ebcf [ 980.625367] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ee35df39ebc47cd99130b5fa04191a8 [ 981.077622] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 981.077873] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 981.078031] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.078209] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 
tempest-ServerShowV257Test-949954514-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 981.078353] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.078500] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 981.078705] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 981.078862] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 981.079029] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 981.079191] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 981.079368] env[61273]: DEBUG nova.virt.hardware [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 981.080301] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6082a94d-90ac-4509-afaf-70e9aead9c58 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.088461] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c01395-1686-4254-9393-56bd70656194 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.104161] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Instance VIF info [] {{(pid=61273) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 981.109855] env[61273]: DEBUG oslo.service.loopingcall [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d 
tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 981.110236] env[61273]: DEBUG nova.network.neutron [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.110732] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg d470ede7e1634486aebd36e890983540 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 981.111554] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Creating VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 981.112443] env[61273]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46d3bc33-19fd-4284-bdda-a0cb44358b17 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.123970] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d470ede7e1634486aebd36e890983540 [ 981.124834] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.644s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.125553] env[61273]: ERROR nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fafa6f04-80b2-4750-8cda-117f48969222, please check neutron logs for more information. 
[ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Traceback (most recent call last): [ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self.driver.spawn(context, instance, image_meta, [ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self._vmops.spawn(context, instance, image_meta, injected_files, [ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] vm_ref = self.build_virtual_machine(instance, [ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] vif_infos = vmwarevif.get_vif_info(self._session, [ 981.125553] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] for vif in network_info: [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] return self._sync_wrapper(fn, *args, **kwargs) [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self.wait() [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self[:] = self._gt.wait() [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] return self._exit_event.wait() [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] current.throw(*self._exc) [ 981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
981.125972] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] result = function(*args, **kwargs) [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] return func(*args, **kwargs) [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] raise e [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] nwinfo = self.network_api.allocate_for_instance( [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] created_port_ids = self._update_ports_for_instance( [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] with excutils.save_and_reraise_exception(): [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] self.force_reraise() [ 981.126362] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 981.126743] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] raise self.value [ 981.126743] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 981.126743] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] updated_port = self._update_port( [ 981.126743] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 981.126743] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] _ensure_no_port_binding_failure(port) [ 981.126743] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 981.126743] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] raise exception.PortBindingFailed(port_id=port['id']) [ 981.126743] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] nova.exception.PortBindingFailed: Binding failed for 
port fafa6f04-80b2-4750-8cda-117f48969222, please check neutron logs for more information. [ 981.126743] env[61273]: ERROR nova.compute.manager [instance: 008a11da-9a85-47d8-9731-602ae35aff64] [ 981.126743] env[61273]: DEBUG nova.compute.utils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Binding failed for port fafa6f04-80b2-4750-8cda-117f48969222, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 981.128117] env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Build of instance 008a11da-9a85-47d8-9731-602ae35aff64 was re-scheduled: Binding failed for port fafa6f04-80b2-4750-8cda-117f48969222, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 981.128540] env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 981.128866] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-008a11da-9a85-47d8-9731-602ae35aff64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.129116] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-008a11da-9a85-47d8-9731-602ae35aff64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.129439] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 981.129961] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 2a72e74fabf244fb8fb9afc64154b3a2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 981.132896] env[61273]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 981.132896] env[61273]: value = "task-375356" [ 981.132896] env[61273]: _type = "Task" [ 981.132896] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.136511] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a72e74fabf244fb8fb9afc64154b3a2 [ 981.142346] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375356, 'name': CreateVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.624987] env[61273]: INFO nova.compute.manager [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 8e12a771-3033-419b-932e-131821d6e1fe] Took 1.03 seconds to deallocate network for instance. [ 981.627742] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg e728bcfd8c454f37bfd95e25f599b16c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 981.647778] env[61273]: DEBUG oslo_vmware.api [-] Task: {'id': task-375356, 'name': CreateVM_Task, 'duration_secs': 0.249673} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.648042] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Created VM on the ESX host {{(pid=61273) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 981.648621] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.648880] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.649339] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 981.649737] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cf4958e-790a-4274-8aef-357380df3711 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.654735] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 981.659714] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 981.659714] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]52dab8f4-fcb4-a803-9b88-3e1297b6da66" [ 981.659714] env[61273]: _type = "Task" [ 981.659714] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.665385] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e728bcfd8c454f37bfd95e25f599b16c [ 981.673508] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]52dab8f4-fcb4-a803-9b88-3e1297b6da66, 'name': SearchDatastore_Task, 'duration_secs': 0.010839} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.673927] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.674272] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Processing image 4a9e718e-a6a1-4b4a-b567-8e55529b2d5b {{(pid=61273) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.674628] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.674866] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.675149] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.675514] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-528915e6-9f79-465b-8491-da873f731ef1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.685000] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61273) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.685305] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61273) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 981.686423] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98e8c813-79a2-4a43-90a6-146178d62de4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.693638] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 981.693638] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]526d3ec4-dcd0-77fb-63a4-03826b1035bb" [ 981.693638] env[61273]: _type = "Task" [ 981.693638] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.701220] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]526d3ec4-dcd0-77fb-63a4-03826b1035bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.745115] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.745653] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 715b17be139343a9820607738c2252ed in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 981.753323] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 715b17be139343a9820607738c2252ed [ 982.133068] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 1cec12a1d1314e2996da150e6c929d33 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 982.164250] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cec12a1d1314e2996da150e6c929d33 [ 982.203556] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]526d3ec4-dcd0-77fb-63a4-03826b1035bb, 'name': SearchDatastore_Task, 'duration_secs': 0.008338} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.204323] env[61273]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d4703f2-7fdc-4a6f-a6ba-e0a1e4ba75ba {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.209161] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 982.209161] env[61273]: value = "session[527e49f3-ce92-061c-8407-29c8a2392124]5299a8e6-219c-c092-7d76-5b18929fa6c3" [ 982.209161] env[61273]: _type = "Task" [ 982.209161] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.216095] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5299a8e6-219c-c092-7d76-5b18929fa6c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.247983] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-008a11da-9a85-47d8-9731-602ae35aff64" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.248180] env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 982.248363] env[61273]: DEBUG nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 982.248528] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 982.262603] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 982.263146] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 0e7165798b644a429a25234eb81fe291 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 982.269484] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e7165798b644a429a25234eb81fe291 [ 982.656101] env[61273]: INFO nova.scheduler.client.report [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Deleted allocations for instance 8e12a771-3033-419b-932e-131821d6e1fe [ 982.662355] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 0498382b20e64c4bbe46074997bc150a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 982.671384] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0498382b20e64c4bbe46074997bc150a [ 982.719610] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': session[527e49f3-ce92-061c-8407-29c8a2392124]5299a8e6-219c-c092-7d76-5b18929fa6c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009229} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.720135] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.720769] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 518bde7d-d2b2-4b53-b30c-37a7c9d29064/518bde7d-d2b2-4b53-b30c-37a7c9d29064.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 982.721222] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84f21644-e275-4a78-97b0-e0811d73aa89 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.728065] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 982.728065] env[61273]: value = "task-375357" [ 982.728065] env[61273]: _type = "Task" [ 982.728065] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.737199] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375357, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.765276] env[61273]: DEBUG nova.network.neutron [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.766017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 6104bce964ab4e74b8d94a7dadbdc381 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 982.774472] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6104bce964ab4e74b8d94a7dadbdc381 [ 983.163711] env[61273]: DEBUG oslo_concurrency.lockutils [None req-70a66f56-f283-4b88-a63c-89f803cca1fe tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "8e12a771-3033-419b-932e-131821d6e1fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.379s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.237347] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375357, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460014} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.237610] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b/4a9e718e-a6a1-4b4a-b567-8e55529b2d5b.vmdk to [datastore2] 518bde7d-d2b2-4b53-b30c-37a7c9d29064/518bde7d-d2b2-4b53-b30c-37a7c9d29064.vmdk {{(pid=61273) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 983.237820] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Extending root virtual disk to 1048576 {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.238148] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4325b5b-ef5b-4169-866f-b528abc0a157 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.243627] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 983.243627] env[61273]: value = "task-375358" [ 983.243627] env[61273]: _type = "Task" [ 983.243627] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.250306] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375358, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.267925] env[61273]: INFO nova.compute.manager [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: 008a11da-9a85-47d8-9731-602ae35aff64] Took 1.02 seconds to deallocate network for instance. [ 983.269607] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg dedf1ca76cf84742936c34d9025d9fba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 983.303865] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dedf1ca76cf84742936c34d9025d9fba [ 983.753638] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375358, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058718} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.754312] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Extended root virtual disk {{(pid=61273) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 983.755175] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1486e572-ae10-47c8-8555-da7cb994cd71 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.776485] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 518bde7d-d2b2-4b53-b30c-37a7c9d29064/518bde7d-d2b2-4b53-b30c-37a7c9d29064.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.779622] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg e12e5ba2e4a144e2876b9c314a1e7597 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 983.780681] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e37646a5-b69a-4e28-a2f1-f94f6a9110a7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.802341] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 983.802341] env[61273]: value = "task-375359" [ 983.802341] env[61273]: _type = "Task" [ 983.802341] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.811180] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375359, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.811782] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e12e5ba2e4a144e2876b9c314a1e7597 [ 984.312253] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375359, 'name': ReconfigVM_Task, 'duration_secs': 0.265121} completed successfully. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.312539] env[61273]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 518bde7d-d2b2-4b53-b30c-37a7c9d29064/518bde7d-d2b2-4b53-b30c-37a7c9d29064.vmdk or device None with type sparse {{(pid=61273) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.313223] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e9a5c4e-9dea-4db5-a459-837c6bab0705 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.316404] env[61273]: INFO nova.scheduler.client.report [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Deleted allocations for instance 008a11da-9a85-47d8-9731-602ae35aff64 [ 984.323097] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 984.323097] env[61273]: value = "task-375360" [ 984.323097] env[61273]: _type = "Task" [ 984.323097] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.323668] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 540390c51d2348f5931f406982b7c311 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 984.332471] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375360, 'name': Rename_Task} progress is 6%. 
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.335099] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 540390c51d2348f5931f406982b7c311 [ 984.398589] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "69e7aa90-75af-46ea-95ca-bca19e36dfec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.398821] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "69e7aa90-75af-46ea-95ca-bca19e36dfec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.399270] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 2f04aae8ac014f51a84e467953a5dda6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 984.409584] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f04aae8ac014f51a84e467953a5dda6 [ 984.828404] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fc1558ec-d221-4e7e-918f-d923a306b490 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "008a11da-9a85-47d8-9731-602ae35aff64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.369s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.834129] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375360, 'name': Rename_Task, 'duration_secs': 0.123567} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.834404] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Powering on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 984.834644] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3fc7a03-738d-450a-b146-bec87d0efac9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.840672] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 984.840672] env[61273]: value = "task-375361" [ 984.840672] env[61273]: _type = "Task" [ 984.840672] env[61273]: } to complete. 
{{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.850353] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375361, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.901821] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 984.903714] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 1a7cbc73fa1f492eafdd841c8aa124e1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 984.939494] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a7cbc73fa1f492eafdd841c8aa124e1 [ 985.353388] env[61273]: DEBUG oslo_vmware.api [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375361, 'name': PowerOnVM_Task, 'duration_secs': 0.406842} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.353634] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Powered on the VM {{(pid=61273) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 985.353847] env[61273]: DEBUG nova.compute.manager [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Checking state {{(pid=61273) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 985.354600] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b2cc13-a095-4c4f-933e-dc5bfa8b2f6f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.361930] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg ffd1d9ef4cbd4252ad4820fbecd6b7db in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 985.400743] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffd1d9ef4cbd4252ad4820fbecd6b7db [ 985.430124] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.430382] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 
tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.431856] env[61273]: INFO nova.compute.claims [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.433751] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 0ba6d211860440718d4c71b2038c9a2f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 985.464931] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ba6d211860440718d4c71b2038c9a2f [ 985.871205] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.937191] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 03d70d39efb24abc94c08d193876b52b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 985.947940] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03d70d39efb24abc94c08d193876b52b [ 986.042601] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.042904] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.043452] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 9af8bb6d73084edfb683693a2205822a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 986.052959] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9af8bb6d73084edfb683693a2205822a [ 986.062964] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 45cde44668194cdaac6212d0aa10c646 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 986.071907] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45cde44668194cdaac6212d0aa10c646 [ 986.489209] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf77d08-196a-4dfc-a025-b707592715b7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.496467] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27081a0-6fae-4b1e-817e-8c4330e76827 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.529481] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3e3b5c-a7f5-4039-b968-28ed3faeae1a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.536749] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ddb53a-4440-4482-8ff4-95b019733b92 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.549680] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Starting instance... {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 986.551512] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 4cb1e61e6c874cbabdb2562e0641bbc2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 986.552821] env[61273]: DEBUG nova.compute.provider_tree [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.553440] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 65a02a8d2ff349aa98f595f33cf22809 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 986.560127] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65a02a8d2ff349aa98f595f33cf22809 [ 986.564274] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "518bde7d-d2b2-4b53-b30c-37a7c9d29064" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.564705] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "518bde7d-d2b2-4b53-b30c-37a7c9d29064" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61273) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.565055] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "518bde7d-d2b2-4b53-b30c-37a7c9d29064-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.565397] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "518bde7d-d2b2-4b53-b30c-37a7c9d29064-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.565701] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "518bde7d-d2b2-4b53-b30c-37a7c9d29064-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.567542] env[61273]: INFO nova.compute.manager [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Terminating instance [ 986.569274] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "refresh_cache-518bde7d-d2b2-4b53-b30c-37a7c9d29064" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.569591] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquired lock "refresh_cache-518bde7d-d2b2-4b53-b30c-37a7c9d29064" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.569967] env[61273]: DEBUG nova.network.neutron [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 986.570470] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 32672ffa58834691a566218bcc26e18b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 986.579743] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32672ffa58834691a566218bcc26e18b [ 986.584307] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cb1e61e6c874cbabdb2562e0641bbc2 [ 987.057679] env[61273]: DEBUG nova.scheduler.client.report [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 
tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 987.059952] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 51cb7a2201a84556890cccaea6fffe17 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 987.073802] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51cb7a2201a84556890cccaea6fffe17 [ 987.074902] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.091518] env[61273]: DEBUG nova.network.neutron [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 987.148904] env[61273]: DEBUG nova.network.neutron [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.149428] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 4003da62846b4305862368f2c65c7827 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 987.158149] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4003da62846b4305862368f2c65c7827 [ 987.564443] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.134s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.565238] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Start building networks asynchronously for instance. 
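The "Acquiring lock" / "acquired ... waited N.NNNs" / "released ... held N.NNNs" triplets throughout this section (for example the compute_resources lock above) are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the same pattern, with a placeholder critical section:

from oslo_concurrency import lockutils

def claim_resources():
    """Placeholder for the critical section the lock protects."""

# Context-manager form: the wrapper logs the acquire/release and hold time,
# producing DEBUG lines like the ones in this log.
with lockutils.lock('compute_resources'):
    claim_resources()

# Equivalent decorator form, commonly applied to whole methods.
@lockutils.synchronized('compute_resources')
def instance_claim():
    claim_resources()

instance_claim()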
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 987.567095] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 3435a99739204d869936bb6e1795571d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 987.568338] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.697s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.568726] env[61273]: DEBUG nova.objects.instance [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61273) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 987.570333] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg e8faec70f9cf4f28b003bd49ebeaf3c5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 987.598127] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3435a99739204d869936bb6e1795571d [ 987.598824] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8faec70f9cf4f28b003bd49ebeaf3c5 [ 987.651791] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Releasing lock "refresh_cache-518bde7d-d2b2-4b53-b30c-37a7c9d29064" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.652290] env[61273]: DEBUG nova.compute.manager [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 987.652504] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 987.653466] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa6e8d7-13e7-4fdd-a326-4ca9d5f5a9c5 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.661329] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Powering off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 987.661580] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9d1e0d7-173e-4c1a-906f-98de00ec5675 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.668983] env[61273]: DEBUG oslo_vmware.api [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 987.668983] env[61273]: value = "task-375362" [ 987.668983] env[61273]: _type = "Task" [ 987.668983] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.676406] env[61273]: DEBUG oslo_vmware.api [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375362, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.073589] env[61273]: DEBUG nova.compute.utils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 988.074256] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg f22686c87b824d1b84d7895ec159340c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 988.076930] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 0977354b9838454aa7081182b55ccca9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 988.078369] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 988.078369] env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 988.083586] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0977354b9838454aa7081182b55ccca9 [ 988.084868] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f22686c87b824d1b84d7895ec159340c [ 988.111957] env[61273]: DEBUG nova.policy [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af34c4e3d81c4729a9dd4a8531992ff1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9325f1def284d2a9fdced4e9eeb17f0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 988.178799] env[61273]: DEBUG oslo_vmware.api [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375362, 'name': PowerOffVM_Task, 'duration_secs': 0.293561} completed successfully. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.179952] env[61273]: DEBUG nova.virt.vmwareapi.vm_util [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Powered off the VM {{(pid=61273) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 988.179952] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Unregistering the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 988.179952] env[61273]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f29351b0-1ed1-4a73-8eb7-a404f5b4fca9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.205563] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Unregistered the VM {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 988.205563] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Deleting contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 988.205563] env[61273]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Deleting the datastore file [datastore2] 518bde7d-d2b2-4b53-b30c-37a7c9d29064 {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.205815] env[61273]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f886b9a3-fb83-4f4f-aa93-f9a4f3f84a40 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.211944] env[61273]: DEBUG oslo_vmware.api [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for the task: (returnval){ [ 988.211944] env[61273]: value = "task-375364" [ 988.211944] env[61273]: _type = "Task" [ 988.211944] env[61273]: } to complete. {{(pid=61273) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.219578] env[61273]: DEBUG oslo_vmware.api [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375364, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.364881] env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Successfully created port: 43474987-c405-46c0-8e73-9c430b8fa49a {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.578991] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Start building block device mappings for instance. 
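The "Policy check for network:attach_external_network failed" record above comes from oslo.policy enforcement against the request's credentials. A minimal standalone sketch of that mechanism; the 'role:admin' rule string is illustrative only, not Nova's actual default:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

# Credentials shaped like the ones logged above (member/reader roles, no admin).
creds = {'roles': ['reader', 'member'],
         'project_id': 'd9325f1def284d2a9fdced4e9eeb17f0'}
print(enforcer.enforce('network:attach_external_network', {}, creds))  # False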
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 988.580771] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 89a3fefe23594d96a23f9e702a8b66fe in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 988.582372] env[61273]: DEBUG oslo_concurrency.lockutils [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.582682] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-fd221f61-2dfa-44ae-91e8-a689db29b23d tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg ee9b13c436534e43b78349f3134e2c92 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 988.583392] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.509s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.584690] env[61273]: INFO nova.compute.claims [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 988.588696] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg bfc95ef4d1d946c38220febf372ff0b0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 988.601633] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee9b13c436534e43b78349f3134e2c92 [ 988.636646] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89a3fefe23594d96a23f9e702a8b66fe [ 988.641341] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfc95ef4d1d946c38220febf372ff0b0 [ 988.721695] env[61273]: DEBUG oslo_vmware.api [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Task: {'id': task-375364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088537} completed successfully. 
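The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task records above follow the usual vSphere teardown order. A rough sketch of that sequence, reusing session, vm_ref and the vim_util import from the task-wait sketch earlier; the datastore path below is illustrative:

# Look up a datacenter reference only so the calls below have one to use.
dc_ref = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'Datacenter', 1).objects[0].obj

# 1. Power the VM off and wait for the task.
session.wait_for_task(
    session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))

# 2. Unregister the VM (synchronous call, no task to wait for).
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

# 3. Delete the instance directory from the datastore.
file_manager = session.vim.service_content.fileManager
session.wait_for_task(
    session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_manager,
                       name='[datastore2] 518bde7d-d2b2-4b53-b30c-37a7c9d29064',
                       datacenter=dc_ref))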
{{(pid=61273) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.722120] env[61273]: DEBUG nova.virt.vmwareapi.ds_util [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Deleted the datastore file {{(pid=61273) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 988.722325] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Deleted contents of the VM from datastore datastore2 {{(pid=61273) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 988.722505] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 988.722799] env[61273]: INFO nova.compute.manager [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Took 1.07 seconds to destroy the instance on the hypervisor. [ 988.723123] env[61273]: DEBUG oslo.service.loopingcall [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 988.723323] env[61273]: DEBUG nova.compute.manager [-] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 988.723414] env[61273]: DEBUG nova.network.neutron [-] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 988.738292] env[61273]: DEBUG nova.network.neutron [-] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 988.738961] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 40031128e05149cf9135d67ce63a7528 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 988.747400] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40031128e05149cf9135d67ce63a7528 [ 988.938505] env[61273]: DEBUG nova.compute.manager [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Received event network-changed-43474987-c405-46c0-8e73-9c430b8fa49a {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 988.938693] env[61273]: DEBUG nova.compute.manager [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Refreshing instance network info cache due to event network-changed-43474987-c405-46c0-8e73-9c430b8fa49a. 
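The "Waiting for function ... _deallocate_network_with_retries to return" record above is oslo.service's looping-call machinery. A minimal sketch of that pattern; the retry body here is a stand-in, and Nova's actual looping-call variant may differ:

from oslo_service import loopingcall

state = {'attempts': 0}

def _retry_deallocate():
    """Stand-in for a retrying deallocation callable."""
    state['attempts'] += 1
    if state['attempts'] >= 3:
        # Stops the loop and hands the value back to .wait() below.
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_retry_deallocate)
result = timer.start(interval=1.0).wait()  # "Waiting for function ... to return"
print(result)  # True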
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 988.938920] env[61273]: DEBUG oslo_concurrency.lockutils [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] Acquiring lock "refresh_cache-69e7aa90-75af-46ea-95ca-bca19e36dfec" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.939061] env[61273]: DEBUG oslo_concurrency.lockutils [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] Acquired lock "refresh_cache-69e7aa90-75af-46ea-95ca-bca19e36dfec" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.939219] env[61273]: DEBUG nova.network.neutron [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Refreshing network info cache for port 43474987-c405-46c0-8e73-9c430b8fa49a {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 988.939649] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] Expecting reply to msg 379340f8be6b4de9ade563ca345071ba in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 988.947556] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 379340f8be6b4de9ade563ca345071ba [ 989.091044] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 8560c25b17b246aab6f9c9b6343aab8d in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 989.092764] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 2b3ed20fb98c446dba0ce52f37398f58 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 989.102516] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b3ed20fb98c446dba0ce52f37398f58 [ 989.135882] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8560c25b17b246aab6f9c9b6343aab8d [ 989.176428] env[61273]: ERROR nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 43474987-c405-46c0-8e73-9c430b8fa49a, please check neutron logs for more information. 
[ 989.176428] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 989.176428] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 989.176428] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 989.176428] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 989.176428] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 989.176428] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 989.176428] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 989.176428] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 989.176428] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 989.176428] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 989.176428] env[61273]: ERROR nova.compute.manager raise self.value [ 989.176428] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 989.176428] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 989.176428] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 989.176428] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 989.177051] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 989.177051] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 989.177051] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 43474987-c405-46c0-8e73-9c430b8fa49a, please check neutron logs for more information. 
[ 989.177051] env[61273]: ERROR nova.compute.manager [ 989.177051] env[61273]: Traceback (most recent call last): [ 989.177051] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 989.177051] env[61273]: listener.cb(fileno) [ 989.177051] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 989.177051] env[61273]: result = function(*args, **kwargs) [ 989.177051] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 989.177051] env[61273]: return func(*args, **kwargs) [ 989.177051] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 989.177051] env[61273]: raise e [ 989.177051] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 989.177051] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 989.177051] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 989.177051] env[61273]: created_port_ids = self._update_ports_for_instance( [ 989.177051] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 989.177051] env[61273]: with excutils.save_and_reraise_exception(): [ 989.177051] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 989.177051] env[61273]: self.force_reraise() [ 989.177051] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 989.177051] env[61273]: raise self.value [ 989.177051] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 989.177051] env[61273]: updated_port = self._update_port( [ 989.177051] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 989.177051] env[61273]: _ensure_no_port_binding_failure(port) [ 989.177051] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 989.177051] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 989.178057] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 43474987-c405-46c0-8e73-9c430b8fa49a, please check neutron logs for more information. [ 989.178057] env[61273]: Removing descriptor: 19 [ 989.241234] env[61273]: DEBUG nova.network.neutron [-] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.241793] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d2753b5d5af541648c06f2f6c793de74 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 989.250216] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2753b5d5af541648c06f2f6c793de74 [ 989.456432] env[61273]: DEBUG nova.network.neutron [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 989.517610] env[61273]: DEBUG nova.network.neutron [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.518134] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] Expecting reply to msg 71edd165ed0140ecaf0dbd9451d6f6fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 989.526162] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71edd165ed0140ecaf0dbd9451d6f6fb [ 989.595700] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 989.626371] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 989.626706] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 989.626918] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.627122] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 989.627272] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.627421] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 
tempest-ServersTestJSON-462811407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 989.627621] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 989.627781] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 989.627947] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 989.628133] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 989.628343] env[61273]: DEBUG nova.virt.hardware [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 989.629186] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e91678-ded0-4019-985e-9fd18a7efba0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.639032] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc85649-ac8f-4c9b-904b-28e7fae55b29 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.654796] env[61273]: ERROR nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 43474987-c405-46c0-8e73-9c430b8fa49a, please check neutron logs for more information. 
[ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Traceback (most recent call last): [ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] yield resources [ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self.driver.spawn(context, instance, image_meta, [ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] vm_ref = self.build_virtual_machine(instance, [ 989.654796] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] vif_infos = vmwarevif.get_vif_info(self._session, [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] for vif in network_info: [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] return self._sync_wrapper(fn, *args, **kwargs) [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self.wait() [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self[:] = self._gt.wait() [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] return self._exit_event.wait() [ 989.655207] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 989.655207] env[61273]: ERROR 
nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] current.throw(*self._exc) [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] result = function(*args, **kwargs) [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] return func(*args, **kwargs) [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] raise e [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] nwinfo = self.network_api.allocate_for_instance( [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] created_port_ids = self._update_ports_for_instance( [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] with excutils.save_and_reraise_exception(): [ 989.655674] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self.force_reraise() [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] raise self.value [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] updated_port = self._update_port( [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] _ensure_no_port_binding_failure(port) [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] raise exception.PortBindingFailed(port_id=port['id']) [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] nova.exception.PortBindingFailed: Binding failed for port 43474987-c405-46c0-8e73-9c430b8fa49a, please check neutron logs for more information. [ 989.656223] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] [ 989.656223] env[61273]: INFO nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Terminating instance [ 989.657195] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-69e7aa90-75af-46ea-95ca-bca19e36dfec" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.658886] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f4f04b-878d-44c5-8151-cda6bf0d3ab4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.665060] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827d52ec-2822-4c1d-87c7-07944145d530 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.697202] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae681718-ccf5-434f-a16f-740bd17f922f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.704227] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbef7530-2676-4cff-aa80-dabfbf95e386 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.716794] env[61273]: DEBUG nova.compute.provider_tree [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.717258] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 4f65a7b15d2f460ea4a5c8a959e61337 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 989.724442] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f65a7b15d2f460ea4a5c8a959e61337 [ 989.744230] env[61273]: INFO nova.compute.manager [-] [instance: 518bde7d-d2b2-4b53-b30c-37a7c9d29064] Took 1.02 seconds to deallocate network for instance. 
[ 989.748013] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 2ebf41763e9d4f7db406c18edb0affd5 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 989.776810] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ebf41763e9d4f7db406c18edb0affd5 [ 990.020171] env[61273]: DEBUG oslo_concurrency.lockutils [req-ac189e59-bd92-4299-9a51-e3d4702988a0 req-5d094f50-dba0-4664-8b67-828d03142b2a service nova] Releasing lock "refresh_cache-69e7aa90-75af-46ea-95ca-bca19e36dfec" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.020531] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-69e7aa90-75af-46ea-95ca-bca19e36dfec" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.020720] env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 990.021164] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg cd400bd4211446e9bf4b6a1cd69d56d2 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 990.027946] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd400bd4211446e9bf4b6a1cd69d56d2 [ 990.219476] env[61273]: DEBUG nova.scheduler.client.report [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 990.221894] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 0884c76a037b480d8acb406145af2ccf in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 990.233064] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0884c76a037b480d8acb406145af2ccf [ 990.250052] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.537661] 
env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 990.615618] env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.616215] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 3ba952ee86d64e1391c962235724254b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 990.624373] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ba952ee86d64e1391c962235724254b [ 990.729795] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.146s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.730333] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Start building networks asynchronously for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 990.732099] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg f5f1d3a7d3224a579e415343a6ad4b5b in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 990.733191] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.483s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.733414] env[61273]: DEBUG nova.objects.instance [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lazy-loading 'resources' on Instance uuid 518bde7d-d2b2-4b53-b30c-37a7c9d29064 {{(pid=61273) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.733756] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 168d552be63d4c5bb0c6128e6f9b589a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 990.740299] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 168d552be63d4c5bb0c6128e6f9b589a [ 990.763340] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5f1d3a7d3224a579e415343a6ad4b5b [ 990.967462] env[61273]: DEBUG nova.compute.manager [req-c96543ce-d39a-4d31-85d0-9165c11e4121 req-3ee5a1c0-ddd2-42ce-b7fa-496eb03cf712 service nova] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Received event network-vif-deleted-43474987-c405-46c0-8e73-9c430b8fa49a {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 991.118822] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-69e7aa90-75af-46ea-95ca-bca19e36dfec" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.119259] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 991.119453] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 991.119758] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-055b492a-d2a6-46ef-9831-fa11ae55d978 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.129085] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7af8a85-ebe6-463f-b8b3-e63faa66e95a {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.152487] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 69e7aa90-75af-46ea-95ca-bca19e36dfec could not be found. [ 991.152671] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 991.152853] env[61273]: INFO nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Took 0.03 seconds to destroy the instance on the hypervisor. [ 991.153087] env[61273]: DEBUG oslo.service.loopingcall [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.153338] env[61273]: DEBUG nova.compute.manager [-] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 991.153449] env[61273]: DEBUG nova.network.neutron [-] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 991.167655] env[61273]: DEBUG nova.network.neutron [-] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 991.168202] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 387618d7b68b4bc58f4d83702376d9cb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 991.175760] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 387618d7b68b4bc58f4d83702376d9cb [ 991.236169] env[61273]: DEBUG nova.compute.utils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 991.236756] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 812faa5702fc49628335aa9b193d8e98 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 991.240668] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Allocating IP information in the background. {{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 991.240861] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 991.248522] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 812faa5702fc49628335aa9b193d8e98 [ 991.289433] env[61273]: DEBUG nova.policy [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8969ac54b88a47028e5784f6575f2d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d2fb7de0ad453dbe6891e6974f1b66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 991.292330] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e137d28-e7e5-4b12-a960-c7cebe1cd1c6 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.299607] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e70c74-791f-4afc-b842-559adb22caf3 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.331603] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474c3eaf-800c-4217-8838-9a48421c862f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.339493] env[61273]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c764e1-04c8-41ed-8591-d910148f8e7c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.353958] env[61273]: DEBUG nova.compute.provider_tree [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.354821] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 841c8ec17c0d4529a39d6dbe6b38e842 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 991.366097] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 841c8ec17c0d4529a39d6dbe6b38e842 [ 991.568824] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Successfully created port: 1d1878be-2615-4a0e-9c52-5ae375ffee0f {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.670477] env[61273]: DEBUG nova.network.neutron [-] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.671280] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bad33eb155724c0190bc3f4862d90c7a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 991.680030] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bad33eb155724c0190bc3f4862d90c7a [ 991.741831] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Start building block device mappings for instance. 
{{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 991.744277] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 19d2d33585dc4fb48146bae763bcc6dd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 991.794379] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19d2d33585dc4fb48146bae763bcc6dd [ 991.857530] env[61273]: DEBUG nova.scheduler.client.report [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.861286] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg c341e5d0ea7e4f3daf19923620bbb125 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 991.872043] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c341e5d0ea7e4f3daf19923620bbb125 [ 992.173956] env[61273]: INFO nova.compute.manager [-] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Took 1.02 seconds to deallocate network for instance. 
[ 992.176819] env[61273]: DEBUG nova.compute.claims [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 992.177237] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.249229] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg a645745eebc34ca4bd86182d32e3021f in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 992.288754] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a645745eebc34ca4bd86182d32e3021f [ 992.364425] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.631s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.367978] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.190s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.370466] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg bcbd93e12515495e840e3e41fd92becc in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 992.392337] env[61273]: INFO nova.scheduler.client.report [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Deleted allocations for instance 518bde7d-d2b2-4b53-b30c-37a7c9d29064 [ 992.394611] env[61273]: ERROR nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f, please check neutron logs for more information. 
[ 992.394611] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 992.394611] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 992.394611] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 992.394611] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 992.394611] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 992.394611] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 992.394611] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 992.394611] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.394611] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 992.394611] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.394611] env[61273]: ERROR nova.compute.manager raise self.value [ 992.394611] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 992.394611] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 992.394611] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 992.394611] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 992.395171] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 992.395171] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 992.395171] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f, please check neutron logs for more information. 
[ 992.395171] env[61273]: ERROR nova.compute.manager [ 992.396255] env[61273]: Traceback (most recent call last): [ 992.396390] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 992.396390] env[61273]: listener.cb(fileno) [ 992.396556] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 992.396556] env[61273]: result = function(*args, **kwargs) [ 992.396650] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 992.396650] env[61273]: return func(*args, **kwargs) [ 992.396799] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 992.396799] env[61273]: raise e [ 992.396946] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 992.396946] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 992.397082] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 992.397082] env[61273]: created_port_ids = self._update_ports_for_instance( [ 992.397175] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 992.397175] env[61273]: with excutils.save_and_reraise_exception(): [ 992.397283] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.397283] env[61273]: self.force_reraise() [ 992.397381] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.397381] env[61273]: raise self.value [ 992.397471] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 992.397471] env[61273]: updated_port = self._update_port( [ 992.397588] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 992.397588] env[61273]: _ensure_no_port_binding_failure(port) [ 992.397680] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 992.397680] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 992.397782] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f, please check neutron logs for more information. [ 992.397850] env[61273]: Removing descriptor: 19 [ 992.399964] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg 1702afe4960c45cdb6f00f58b57cb585 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 992.432494] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcbd93e12515495e840e3e41fd92becc [ 992.438522] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1702afe4960c45cdb6f00f58b57cb585 [ 992.753169] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Start spawning the instance on the hypervisor. 
{{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 992.777774] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 992.778288] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 992.778617] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.778954] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 992.779249] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.779546] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 992.779972] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 992.780323] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 992.780639] 
env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 992.780945] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 992.781258] env[61273]: DEBUG nova.virt.hardware [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 992.782274] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7039e1d2-a40e-4bea-a894-78eca94fa591 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.790597] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c551b1f-07dd-4b46-9289-082bd4f54912 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.804025] env[61273]: ERROR nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f, please check neutron logs for more information. 
[ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Traceback (most recent call last): [ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] yield resources [ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self.driver.spawn(context, instance, image_meta, [ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self._vmops.spawn(context, instance, image_meta, injected_files, [ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] vm_ref = self.build_virtual_machine(instance, [ 992.804025] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] vif_infos = vmwarevif.get_vif_info(self._session, [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] for vif in network_info: [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] return self._sync_wrapper(fn, *args, **kwargs) [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self.wait() [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self[:] = self._gt.wait() [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] return self._exit_event.wait() [ 992.804448] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 992.804448] env[61273]: ERROR 
nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] current.throw(*self._exc) [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] result = function(*args, **kwargs) [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] return func(*args, **kwargs) [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] raise e [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] nwinfo = self.network_api.allocate_for_instance( [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] created_port_ids = self._update_ports_for_instance( [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] with excutils.save_and_reraise_exception(): [ 992.804863] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self.force_reraise() [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] raise self.value [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] updated_port = self._update_port( [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] _ensure_no_port_binding_failure(port) [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] raise exception.PortBindingFailed(port_id=port['id']) [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] nova.exception.PortBindingFailed: Binding failed for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f, please check neutron logs for more information. [ 992.805257] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] [ 992.806139] env[61273]: INFO nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Terminating instance [ 992.808746] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.809089] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.809411] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 992.809956] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 64f6cb18ab4f4bdaaaad1505a10491fd in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 992.816800] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64f6cb18ab4f4bdaaaad1505a10491fd [ 992.906937] env[61273]: DEBUG oslo_concurrency.lockutils [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Lock "518bde7d-d2b2-4b53-b30c-37a7c9d29064" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.342s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.907614] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-98ac3efa-f7bb-4ea6-b8cf-447e97252908 tempest-ServerShowV257Test-949954514 tempest-ServerShowV257Test-949954514-project-member] Expecting reply to msg ec4e9e3197d84cbba2101682935142fb in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 992.921898] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec4e9e3197d84cbba2101682935142fb [ 992.928256] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb9ad87-2785-4532-b91c-11c84d0407e4 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.937420] 
env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e61c0e-22fa-4151-b85e-e88732019c76 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.972914] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b67219-ddac-49b0-aeab-98d98d712640 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.982444] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df66f953-e05a-42a1-97c3-382181b4b9fc {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.000222] env[61273]: DEBUG nova.compute.provider_tree [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.002061] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg d5312d16bdbc40a989071c09cde63e4e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 993.004680] env[61273]: DEBUG nova.compute.manager [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Received event network-changed-1d1878be-2615-4a0e-9c52-5ae375ffee0f {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 993.004878] env[61273]: DEBUG nova.compute.manager [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Refreshing instance network info cache due to event network-changed-1d1878be-2615-4a0e-9c52-5ae375ffee0f. {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 993.005070] env[61273]: DEBUG oslo_concurrency.lockutils [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] Acquiring lock "refresh_cache-f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.008892] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5312d16bdbc40a989071c09cde63e4e [ 993.328100] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 993.405948] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.406493] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 2be013cb6bd24ccc803b281c4834fe24 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 993.414588] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2be013cb6bd24ccc803b281c4834fe24 [ 993.507161] env[61273]: DEBUG nova.scheduler.client.report [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 993.509489] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 54c1bbc8ae4543b8b252bd29f372bf38 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 993.520284] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54c1bbc8ae4543b8b252bd29f372bf38 [ 993.908713] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.909179] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Start destroying the instance on the hypervisor. 
{{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 993.909391] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 993.909746] env[61273]: DEBUG oslo_concurrency.lockutils [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] Acquired lock "refresh_cache-f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.909915] env[61273]: DEBUG nova.network.neutron [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Refreshing network info cache for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 993.910360] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] Expecting reply to msg 7a5a4b7e56474999855e11936d3d6f97 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 993.911173] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8980d64a-9e3a-422a-b736-efa542f6d25c {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.917915] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a5a4b7e56474999855e11936d3d6f97 [ 993.920775] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e4a992-0252-4b10-965f-e81998735ebe {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.941244] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828 could not be found. [ 993.941452] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 993.941616] env[61273]: INFO nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Took 0.03 seconds to destroy the instance on the hypervisor. [ 993.941880] env[61273]: DEBUG oslo.service.loopingcall [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.942090] env[61273]: DEBUG nova.compute.manager [-] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 993.942175] env[61273]: DEBUG nova.network.neutron [-] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 993.957040] env[61273]: DEBUG nova.network.neutron [-] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 993.957477] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bc505fc3b63c4c799ae8216cea319f2a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 993.964751] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc505fc3b63c4c799ae8216cea319f2a [ 994.011894] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.644s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.012529] env[61273]: ERROR nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 43474987-c405-46c0-8e73-9c430b8fa49a, please check neutron logs for more information. 
[ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Traceback (most recent call last): [ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self.driver.spawn(context, instance, image_meta, [ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self._vmops.spawn(context, instance, image_meta, injected_files, [ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] vm_ref = self.build_virtual_machine(instance, [ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] vif_infos = vmwarevif.get_vif_info(self._session, [ 994.012529] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] for vif in network_info: [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] return self._sync_wrapper(fn, *args, **kwargs) [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self.wait() [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self[:] = self._gt.wait() [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] return self._exit_event.wait() [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] current.throw(*self._exc) [ 994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
994.013152] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] result = function(*args, **kwargs) [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] return func(*args, **kwargs) [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] raise e [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] nwinfo = self.network_api.allocate_for_instance( [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] created_port_ids = self._update_ports_for_instance( [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] with excutils.save_and_reraise_exception(): [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] self.force_reraise() [ 994.013802] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 994.014453] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] raise self.value [ 994.014453] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 994.014453] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] updated_port = self._update_port( [ 994.014453] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 994.014453] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] _ensure_no_port_binding_failure(port) [ 994.014453] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 994.014453] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] raise exception.PortBindingFailed(port_id=port['id']) [ 994.014453] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] nova.exception.PortBindingFailed: Binding failed for 
port 43474987-c405-46c0-8e73-9c430b8fa49a, please check neutron logs for more information. [ 994.014453] env[61273]: ERROR nova.compute.manager [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] [ 994.014453] env[61273]: DEBUG nova.compute.utils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Binding failed for port 43474987-c405-46c0-8e73-9c430b8fa49a, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 994.014957] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Build of instance 69e7aa90-75af-46ea-95ca-bca19e36dfec was re-scheduled: Binding failed for port 43474987-c405-46c0-8e73-9c430b8fa49a, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 994.015346] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 994.015568] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-69e7aa90-75af-46ea-95ca-bca19e36dfec" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.015712] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-69e7aa90-75af-46ea-95ca-bca19e36dfec" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.015867] env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 994.016288] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg d6fbd54a59464735824a6f9fe3be57ee in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 994.023035] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6fbd54a59464735824a6f9fe3be57ee [ 994.428755] env[61273]: DEBUG nova.network.neutron [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 994.458986] env[61273]: DEBUG nova.network.neutron [-] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.459645] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 714930e811504c3095a220d793501865 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 994.467343] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 714930e811504c3095a220d793501865 [ 994.491920] env[61273]: DEBUG nova.network.neutron [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.492485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] Expecting reply to msg 34c307d8dda34945bb5def031c3ba9a3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 994.500864] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34c307d8dda34945bb5def031c3ba9a3 [ 994.531867] env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 994.593435] env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.594111] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg acd4bf2624c34d6dbe73b7e86f04a776 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 994.602404] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acd4bf2624c34d6dbe73b7e86f04a776 [ 994.963983] env[61273]: INFO nova.compute.manager [-] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Took 1.02 seconds to deallocate network for instance. 
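The two build failures in this stretch of the log both end in nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure in nova/network/neutron.py (line 294 in the tracebacks). Below is a minimal, self-contained sketch of that check for readers following the traceback; the specific Neutron attribute inspected ('binding:vif_type' left as 'binding_failed') is an assumption for illustration, since the log only shows the call site and the exception it raises.

# Hedged sketch of the check behind the PortBindingFailed tracebacks above.
# Assumption: a failed binding is signalled by Neutron via
# port['binding:vif_type'] == 'binding_failed'; the log itself only shows
# _ensure_no_port_binding_failure being called and the exception raised.


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron logs "
            f"for more information.")


def _ensure_no_port_binding_failure(port):
    # If Neutron could not bind the port, the VIF type is left in a failed
    # state and the instance build is aborted (and later re-scheduled, as the
    # "was re-scheduled" DEBUG lines above show).
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    failed_port = {'id': '43474987-c405-46c0-8e73-9c430b8fa49a',
                   'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)  # same wording as the ERROR summary lines above
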
[ 994.966439] env[61273]: DEBUG nova.compute.claims [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 994.966693] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.966981] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.968959] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 7793ab911d5144cabcf72bb923300d02 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 994.994924] env[61273]: DEBUG oslo_concurrency.lockutils [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] Releasing lock "refresh_cache-f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.995282] env[61273]: DEBUG nova.compute.manager [req-9ae7449f-d707-4ee1-b3ad-9db375714aa8 req-8cf678c3-c6d0-4bdd-ade7-9d615c4fd07a service nova] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Received event network-vif-deleted-1d1878be-2615-4a0e-9c52-5ae375ffee0f {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 995.006927] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7793ab911d5144cabcf72bb923300d02 [ 995.096369] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-69e7aa90-75af-46ea-95ca-bca19e36dfec" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.096619] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 995.096809] env[61273]: DEBUG nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 995.096979] env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 995.112015] env[61273]: DEBUG nova.network.neutron [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 995.112569] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg f438ec1d0cd844079e6803fe2268e91e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 995.118971] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f438ec1d0cd844079e6803fe2268e91e [ 995.513059] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1013eed6-72c7-4456-b984-385fd179bac9 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.521875] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0701c0-d0c2-4a97-ae8d-5262a2402c35 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.551228] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00398861-30b2-446e-bdd1-c85e888f91fd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.558894] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea04993b-9ca8-4362-ad4c-5541e1fdd420 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.571660] env[61273]: DEBUG nova.compute.provider_tree [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.571772] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 7204e5eba53646fbb64548159dc4d733 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 995.578814] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7204e5eba53646fbb64548159dc4d733 [ 995.615502] env[61273]: DEBUG nova.network.neutron 
[None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.616034] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 3f969bdc059c4ac6ac54f6c57038b483 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 995.623807] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f969bdc059c4ac6ac54f6c57038b483 [ 996.076145] env[61273]: DEBUG nova.scheduler.client.report [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 996.078400] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg e55fff8034064c948016b174e4c6a7ed in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 996.088854] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e55fff8034064c948016b174e4c6a7ed [ 996.117725] env[61273]: INFO nova.compute.manager [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 69e7aa90-75af-46ea-95ca-bca19e36dfec] Took 1.02 seconds to deallocate network for instance. 
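The "Inventory has not changed for provider ..." report above carries the full inventory payload for the resource provider. The snippet below replays that payload and derives the schedulable capacity placement would work with; the formula (total - reserved) * allocation_ratio is the usual placement capacity calculation and is an assumption here, since the log only records that the inventory was unchanged.

# Illustrative only: the inventory dict logged above and the effective
# capacity per resource class. Capacity formula is assumed, not taken from
# this log.

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(f"{rc}: schedulable capacity ~{capacity}")
# VCPU: schedulable capacity ~192
# MEMORY_MB: schedulable capacity ~196078
# DISK_GB: schedulable capacity ~400
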
[ 996.119279] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 3a7b7d04e99a4d6cae7660e2d7f3064c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 996.149247] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a7b7d04e99a4d6cae7660e2d7f3064c [ 996.582770] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.616s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.583437] env[61273]: ERROR nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f, please check neutron logs for more information. [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Traceback (most recent call last): [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self.driver.spawn(context, instance, image_meta, [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self._vmops.spawn(context, instance, image_meta, injected_files, [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] vm_ref = self.build_virtual_machine(instance, [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] vif_infos = vmwarevif.get_vif_info(self._session, [ 996.583437] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] for vif in network_info: [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] return self._sync_wrapper(fn, *args, **kwargs) [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self.wait() [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self[:] = self._gt.wait() [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] return self._exit_event.wait() [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] current.throw(*self._exc) [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 996.583823] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] result = function(*args, **kwargs) [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] return func(*args, **kwargs) [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] raise e [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] nwinfo = self.network_api.allocate_for_instance( [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] created_port_ids = self._update_ports_for_instance( [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] with excutils.save_and_reraise_exception(): [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] self.force_reraise() [ 996.584406] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 996.584923] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] raise self.value [ 996.584923] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 996.584923] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] updated_port = self._update_port( [ 996.584923] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 996.584923] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] _ensure_no_port_binding_failure(port) [ 996.584923] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 996.584923] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] raise exception.PortBindingFailed(port_id=port['id']) [ 996.584923] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] nova.exception.PortBindingFailed: Binding failed for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f, please check neutron logs for more information. [ 996.584923] env[61273]: ERROR nova.compute.manager [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] [ 996.584923] env[61273]: DEBUG nova.compute.utils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Binding failed for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 996.586139] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Build of instance f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828 was re-scheduled: Binding failed for port 1d1878be-2615-4a0e-9c52-5ae375ffee0f, please check neutron logs for more information. 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 996.586543] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 996.586771] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquiring lock "refresh_cache-f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.586923] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Acquired lock "refresh_cache-f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.587114] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 996.587520] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg bae566e51f9646e6b0fe6c5be327da3e in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 996.593957] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bae566e51f9646e6b0fe6c5be327da3e [ 996.624017] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg c3aaaac40cca4b9095217ae8c8e30f73 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 996.652966] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3aaaac40cca4b9095217ae8c8e30f73 [ 997.103871] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 997.151363] env[61273]: INFO nova.scheduler.client.report [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Deleted allocations for instance 69e7aa90-75af-46ea-95ca-bca19e36dfec [ 997.165503] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg c075d65ce70a468bb384d0cdc50de1c1 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 997.180392] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.181277] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 7422c65b547c49c28684081412849c74 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 997.182302] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c075d65ce70a468bb384d0cdc50de1c1 [ 997.193279] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7422c65b547c49c28684081412849c74 [ 997.668646] env[61273]: DEBUG oslo_concurrency.lockutils [None req-e1075c80-68e3-4692-a113-629c2181b7d8 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "69e7aa90-75af-46ea-95ca-bca19e36dfec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.270s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.684690] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Releasing lock "refresh_cache-f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.684808] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 997.684904] env[61273]: DEBUG nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 997.685076] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 997.699174] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 997.699787] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 50e082af8bae4c66885d01bcd0970531 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 997.706704] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50e082af8bae4c66885d01bcd0970531 [ 998.201987] env[61273]: DEBUG nova.network.neutron [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.202557] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg c49c8c5c007c498b9200f8edd8a84276 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 998.214782] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c49c8c5c007c498b9200f8edd8a84276 [ 998.707744] env[61273]: INFO nova.compute.manager [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] [instance: f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828] Took 1.02 seconds to deallocate network for instance. 
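The repeated 'Acquiring lock "compute_resources" by "...instance_claim"' / '"released" ... :: held N.NNNs' DEBUG lines in this log come from oslo.concurrency's lockutils wrapper. A minimal sketch of that pattern follows; the toy functions stand in for the resource-tracker methods named in the log and are not Nova code. With DEBUG logging enabled, lockutils emits the same acquire/held messages around each call.

# Sketch of the oslo.concurrency locking pattern behind the
# compute_resources lock lines above.

import logging
import time

from oslo_concurrency import lockutils

# Show the oslo_concurrency.lockutils DEBUG messages seen in the log.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def instance_claim():
    # Claim accounting runs with the lock held, so it cannot interleave with
    # an abort_instance_claim for another instance on the same host.
    time.sleep(0.1)


@lockutils.synchronized('compute_resources')
def abort_instance_claim():
    time.sleep(0.1)


if __name__ == '__main__':
    instance_claim()
    abort_instance_claim()
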
[ 998.707744] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg f255abde17ee41b3b290064bc120ae0a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 998.741437] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f255abde17ee41b3b290064bc120ae0a [ 998.912450] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "74f79e89-3508-474f-ac4e-cc202231eed7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.912635] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "74f79e89-3508-474f-ac4e-cc202231eed7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.913199] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 8c6e8a1819a940d6aecc968a1ee8b568 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 998.923728] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c6e8a1819a940d6aecc968a1ee8b568 [ 999.211980] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg fed84bf0fa4a47ba9ae3a1e69a25627c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 999.251725] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fed84bf0fa4a47ba9ae3a1e69a25627c [ 999.419534] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Starting instance... 
{{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 999.420897] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 9daf9e8e50d24cc59ae68fc3fec891f7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 999.450576] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9daf9e8e50d24cc59ae68fc3fec891f7 [ 999.745965] env[61273]: INFO nova.scheduler.client.report [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Deleted allocations for instance f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828 [ 999.755173] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Expecting reply to msg 805b7c92b2424fd0b817b7440045f9a9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 999.766729] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 805b7c92b2424fd0b817b7440045f9a9 [ 999.937324] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.937606] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.938986] env[61273]: INFO nova.compute.claims [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 999.940666] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 4ec8e90c93bb432e9808103a5efee5d3 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 999.973979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ec8e90c93bb432e9808103a5efee5d3 [ 1000.254960] env[61273]: DEBUG oslo_concurrency.lockutils [None req-3f15f766-fb75-462f-b6c8-fd76785b1b98 tempest-ServerDiskConfigTestJSON-2141274115 tempest-ServerDiskConfigTestJSON-2141274115-project-member] Lock "f1051b8f-fcfc-4aaa-8e63-9fafbb8f5828" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.212s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.445340] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg fea0be90a5a240009113d183a4538f64 in queue 
reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1000.451818] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fea0be90a5a240009113d183a4538f64 [ 1000.980462] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f022c716-f80e-41a1-99a5-ed3e8edc37fd {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.989255] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bedba2c-2177-46b5-a955-61dd3844f697 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.019469] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67a20d7-411e-4f50-a176-a623ac45ccf1 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.026876] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9fa968-1c55-4e4b-a28f-2739422fa397 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.039997] env[61273]: DEBUG nova.compute.provider_tree [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.040485] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 6424a041c3ba42028e62dbbec44ecbe9 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1001.049625] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6424a041c3ba42028e62dbbec44ecbe9 [ 1001.543591] env[61273]: DEBUG nova.scheduler.client.report [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1001.545831] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 835a62d0c7174bb0b188ee869fef9c58 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1001.560669] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 835a62d0c7174bb0b188ee869fef9c58 [ 1002.048727] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.111s 
{{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.049609] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Start building networks asynchronously for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1002.050979] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg fbb44aa593b248c682a682a72be4f7ce in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1002.091751] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbb44aa593b248c682a682a72be4f7ce [ 1002.554615] env[61273]: DEBUG nova.compute.utils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Using /dev/sd instead of None {{(pid=61273) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1002.555357] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg b382a847f1f442b9b9cf811848287f33 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1002.556426] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Allocating IP information in the background. 
{{(pid=61273) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1002.556746] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] allocate_for_instance() {{(pid=61273) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1002.566920] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b382a847f1f442b9b9cf811848287f33 [ 1002.606505] env[61273]: DEBUG nova.policy [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af34c4e3d81c4729a9dd4a8531992ff1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9325f1def284d2a9fdced4e9eeb17f0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61273) authorize /opt/stack/nova/nova/policy.py:203}} [ 1002.833797] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Successfully created port: 4a2145c6-50f5-4f1c-86d8-12ac61bec027 {{(pid=61273) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.059870] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Start building block device mappings for instance. {{(pid=61273) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1003.061585] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 0e17a3d8d0084131bbccf0e72c9a978c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1003.102723] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e17a3d8d0084131bbccf0e72c9a978c [ 1003.388213] env[61273]: DEBUG nova.compute.manager [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Received event network-changed-4a2145c6-50f5-4f1c-86d8-12ac61bec027 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1003.388516] env[61273]: DEBUG nova.compute.manager [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Refreshing instance network info cache due to event network-changed-4a2145c6-50f5-4f1c-86d8-12ac61bec027. 
{{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 1003.388858] env[61273]: DEBUG oslo_concurrency.lockutils [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] Acquiring lock "refresh_cache-74f79e89-3508-474f-ac4e-cc202231eed7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.389024] env[61273]: DEBUG oslo_concurrency.lockutils [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] Acquired lock "refresh_cache-74f79e89-3508-474f-ac4e-cc202231eed7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.389212] env[61273]: DEBUG nova.network.neutron [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Refreshing network info cache for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027 {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1003.389674] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] Expecting reply to msg d6eda03c442d43b4a9e565de2444d4f0 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1003.398925] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6eda03c442d43b4a9e565de2444d4f0 [ 1003.553181] env[61273]: ERROR nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027, please check neutron logs for more information. 
[ 1003.553181] env[61273]: ERROR nova.compute.manager Traceback (most recent call last): [ 1003.553181] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1003.553181] env[61273]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 1003.553181] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1003.553181] env[61273]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 1003.553181] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1003.553181] env[61273]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 1003.553181] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1003.553181] env[61273]: ERROR nova.compute.manager self.force_reraise() [ 1003.553181] env[61273]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1003.553181] env[61273]: ERROR nova.compute.manager raise self.value [ 1003.553181] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1003.553181] env[61273]: ERROR nova.compute.manager updated_port = self._update_port( [ 1003.553181] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1003.553181] env[61273]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 1003.553804] env[61273]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1003.553804] env[61273]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 1003.553804] env[61273]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027, please check neutron logs for more information. 
[ 1003.553804] env[61273]: ERROR nova.compute.manager [ 1003.553804] env[61273]: Traceback (most recent call last): [ 1003.553804] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 1003.553804] env[61273]: listener.cb(fileno) [ 1003.553804] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1003.553804] env[61273]: result = function(*args, **kwargs) [ 1003.553804] env[61273]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1003.553804] env[61273]: return func(*args, **kwargs) [ 1003.553804] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1003.553804] env[61273]: raise e [ 1003.553804] env[61273]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1003.553804] env[61273]: nwinfo = self.network_api.allocate_for_instance( [ 1003.553804] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1003.553804] env[61273]: created_port_ids = self._update_ports_for_instance( [ 1003.553804] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1003.553804] env[61273]: with excutils.save_and_reraise_exception(): [ 1003.553804] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1003.553804] env[61273]: self.force_reraise() [ 1003.553804] env[61273]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1003.553804] env[61273]: raise self.value [ 1003.553804] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1003.553804] env[61273]: updated_port = self._update_port( [ 1003.553804] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1003.553804] env[61273]: _ensure_no_port_binding_failure(port) [ 1003.553804] env[61273]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1003.553804] env[61273]: raise exception.PortBindingFailed(port_id=port['id']) [ 1003.554824] env[61273]: nova.exception.PortBindingFailed: Binding failed for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027, please check neutron logs for more information. [ 1003.554824] env[61273]: Removing descriptor: 19 [ 1003.566361] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg e780f2154ce840c9aed3d10f55fc4c1a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1003.625522] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e780f2154ce840c9aed3d10f55fc4c1a [ 1003.907228] env[61273]: DEBUG nova.network.neutron [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1003.977784] env[61273]: DEBUG nova.network.neutron [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.978306] env[61273]: INFO oslo_messaging._drivers.amqpdriver [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] Expecting reply to msg 117cb1a99bce4e89aca896f1f8fe8c73 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1003.986560] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 117cb1a99bce4e89aca896f1f8fe8c73 [ 1004.069176] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Start spawning the instance on the hypervisor. {{(pid=61273) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1004.094978] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-08-02T13:31:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-08-02T13:31:31Z,direct_url=,disk_format='vmdk',id=4a9e718e-a6a1-4b4a-b567-8e55529b2d5b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='56a3f1e59b954d0f933357e6f5de53aa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-08-02T13:31:32Z,virtual_size=,visibility=), allow threads: False {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1004.095238] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1004.095397] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image limits 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.095579] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Flavor pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1004.095724] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Image pref 0:0:0 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.095881] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 
tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61273) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1004.096098] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1004.096262] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1004.096427] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Got 1 possible topologies {{(pid=61273) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1004.096588] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1004.096753] env[61273]: DEBUG nova.virt.hardware [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61273) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1004.097595] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42258002-d14c-4905-a0fd-af23745e2dc7 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.105695] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba40afb-dce3-4931-b118-f109885420c0 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.119763] env[61273]: ERROR nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027, please check neutron logs for more information. 
[ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Traceback (most recent call last): [ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] yield resources [ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self.driver.spawn(context, instance, image_meta, [ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] vm_ref = self.build_virtual_machine(instance, [ 1004.119763] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] vif_infos = vmwarevif.get_vif_info(self._session, [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] for vif in network_info: [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] return self._sync_wrapper(fn, *args, **kwargs) [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self.wait() [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self[:] = self._gt.wait() [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] return self._exit_event.wait() [ 1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
1004.120250] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] current.throw(*self._exc) [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] result = function(*args, **kwargs) [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] return func(*args, **kwargs) [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] raise e [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] nwinfo = self.network_api.allocate_for_instance( [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] created_port_ids = self._update_ports_for_instance( [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] with excutils.save_and_reraise_exception(): [ 1004.120712] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self.force_reraise() [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] raise self.value [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] updated_port = self._update_port( [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] _ensure_no_port_binding_failure(port) [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", 
line 294, in _ensure_no_port_binding_failure [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] raise exception.PortBindingFailed(port_id=port['id']) [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] nova.exception.PortBindingFailed: Binding failed for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027, please check neutron logs for more information. [ 1004.121210] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] [ 1004.121210] env[61273]: INFO nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Terminating instance [ 1004.124308] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-74f79e89-3508-474f-ac4e-cc202231eed7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.480377] env[61273]: DEBUG oslo_concurrency.lockutils [req-bb1dd543-ddec-41f0-bb6c-af4ef194e05a req-2fa8132d-bfff-4028-9557-5833330bf28e service nova] Releasing lock "refresh_cache-74f79e89-3508-474f-ac4e-cc202231eed7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.480742] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-74f79e89-3508-474f-ac4e-cc202231eed7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.480894] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1004.481301] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg f04ee2824d19444dba311aedd03bff80 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1004.488385] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f04ee2824d19444dba311aedd03bff80 [ 1004.998634] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1005.059713] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.060247] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 898e232c4a854b2d8988864a116f98ca in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1005.067969] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 898e232c4a854b2d8988864a116f98ca [ 1005.412860] env[61273]: DEBUG nova.compute.manager [req-142270a1-d3d7-48d5-90b3-4a90c706d2c2 req-2e542a63-e815-406d-a1db-14e4573bebd6 service nova] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Received event network-vif-deleted-4a2145c6-50f5-4f1c-86d8-12ac61bec027 {{(pid=61273) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 1005.562304] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-74f79e89-3508-474f-ac4e-cc202231eed7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.562737] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Start destroying the instance on the hypervisor. {{(pid=61273) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1005.562978] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Destroying instance {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1005.563286] env[61273]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e141fa20-91c1-497c-8db9-87c57cb96323 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.572288] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8403b01b-d576-4223-960d-1b3e2256eebe {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.592307] env[61273]: WARNING nova.virt.vmwareapi.vmops [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 74f79e89-3508-474f-ac4e-cc202231eed7 could not be found. 
[ 1005.592508] env[61273]: DEBUG nova.virt.vmwareapi.vmops [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Instance destroyed {{(pid=61273) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1005.592687] env[61273]: INFO nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1005.592923] env[61273]: DEBUG oslo.service.loopingcall [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61273) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.593155] env[61273]: DEBUG nova.compute.manager [-] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1005.593249] env[61273]: DEBUG nova.network.neutron [-] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1005.608157] env[61273]: DEBUG nova.network.neutron [-] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Instance cache missing network info. {{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1005.608596] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c2abb9b2faf5432c82305f67eef3f533 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1005.615500] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2abb9b2faf5432c82305f67eef3f533 [ 1006.110932] env[61273]: DEBUG nova.network.neutron [-] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.111424] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 411a1a73ffc7481c8385cb6cdd1b896c in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1006.119318] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 411a1a73ffc7481c8385cb6cdd1b896c [ 1006.613915] env[61273]: INFO nova.compute.manager [-] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Took 1.02 seconds to deallocate network for instance. 
[ 1006.616280] env[61273]: DEBUG nova.compute.claims [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Aborting claim: {{(pid=61273) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1006.616463] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.616683] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.618488] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 63de281980d9447b95f7387e69e3fd3a in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1006.652399] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63de281980d9447b95f7387e69e3fd3a [ 1007.150934] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bdbdee-8cb8-4f41-959a-5f3ce7db2647 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.158118] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc4dd11-4593-4254-8e80-a263253dd021 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.186698] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d04599b-2267-4b12-8c7e-0c708859aa62 {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.193290] env[61273]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb25fa7-c2d3-465e-8357-280bbf4bef6f {{(pid=61273) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.205503] env[61273]: DEBUG nova.compute.provider_tree [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed in ProviderTree for provider: 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb {{(pid=61273) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.205989] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg e4519969ff634467b2917ef3b2f6c375 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1007.212835] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4519969ff634467b2917ef3b2f6c375 [ 1007.708607] env[61273]: DEBUG nova.scheduler.client.report [None 
req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Inventory has not changed for provider 4b2a9d85-76d2-47a9-873e-680d9c1d5ccb based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 141, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61273) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1007.710904] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg ae8fdd442f4a4b0e87e39f3c44016e07 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1007.722267] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae8fdd442f4a4b0e87e39f3c44016e07 [ 1008.213658] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.597s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.214411] env[61273]: ERROR nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027, please check neutron logs for more information. 
[ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Traceback (most recent call last): [ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self.driver.spawn(context, instance, image_meta, [ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] vm_ref = self.build_virtual_machine(instance, [ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] vif_infos = vmwarevif.get_vif_info(self._session, [ 1008.214411] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] for vif in network_info: [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] return self._sync_wrapper(fn, *args, **kwargs) [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self.wait() [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self[:] = self._gt.wait() [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] return self._exit_event.wait() [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] current.throw(*self._exc) [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", 
line 265, in main [ 1008.214864] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] result = function(*args, **kwargs) [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] return func(*args, **kwargs) [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] raise e [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] nwinfo = self.network_api.allocate_for_instance( [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] created_port_ids = self._update_ports_for_instance( [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] with excutils.save_and_reraise_exception(): [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] self.force_reraise() [ 1008.215255] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1008.215651] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] raise self.value [ 1008.215651] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 1008.215651] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] updated_port = self._update_port( [ 1008.215651] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 1008.215651] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] _ensure_no_port_binding_failure(port) [ 1008.215651] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 1008.215651] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] raise exception.PortBindingFailed(port_id=port['id']) [ 1008.215651] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] 
nova.exception.PortBindingFailed: Binding failed for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027, please check neutron logs for more information. [ 1008.215651] env[61273]: ERROR nova.compute.manager [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] [ 1008.215651] env[61273]: DEBUG nova.compute.utils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Binding failed for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027, please check neutron logs for more information. {{(pid=61273) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1008.217253] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Build of instance 74f79e89-3508-474f-ac4e-cc202231eed7 was re-scheduled: Binding failed for port 4a2145c6-50f5-4f1c-86d8-12ac61bec027, please check neutron logs for more information. {{(pid=61273) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1008.217655] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Unplugging VIFs for instance {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1008.217882] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquiring lock "refresh_cache-74f79e89-3508-474f-ac4e-cc202231eed7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.218040] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Acquired lock "refresh_cache-74f79e89-3508-474f-ac4e-cc202231eed7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.218275] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Building network info cache for instance {{(pid=61273) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1008.218693] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 8e6c3018ed354a9392dd1c6b4477aa40 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1008.224948] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e6c3018ed354a9392dd1c6b4477aa40 [ 1008.736335] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1008.794918] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.795419] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 39b98597d4ee43e4877eaae630dc7fa7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1008.803563] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39b98597d4ee43e4877eaae630dc7fa7 [ 1009.297662] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Releasing lock "refresh_cache-74f79e89-3508-474f-ac4e-cc202231eed7" {{(pid=61273) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.297908] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61273) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1009.298092] env[61273]: DEBUG nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Deallocating network for instance {{(pid=61273) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1009.298259] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] deallocate_for_instance() {{(pid=61273) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1009.312689] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Instance cache missing network info. 
{{(pid=61273) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1009.313288] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 6b6173491a054662885bc51ed9a187b7 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1009.319457] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b6173491a054662885bc51ed9a187b7 [ 1009.815244] env[61273]: DEBUG nova.network.neutron [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Updating instance_info_cache with network_info: [] {{(pid=61273) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.815788] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 4d98cf5f389346398fb571dec7280f99 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1009.823284] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d98cf5f389346398fb571dec7280f99 [ 1010.319063] env[61273]: INFO nova.compute.manager [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] [instance: 74f79e89-3508-474f-ac4e-cc202231eed7] Took 1.02 seconds to deallocate network for instance. [ 1010.319840] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg e12e33e7665d49c28985d29df03b5772 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1010.353582] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e12e33e7665d49c28985d29df03b5772 [ 1010.824753] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 8d11b4838b7f47cf937fe5eea4028cf6 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1010.861768] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d11b4838b7f47cf937fe5eea4028cf6 [ 1011.353670] env[61273]: INFO nova.scheduler.client.report [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Deleted allocations for instance 74f79e89-3508-474f-ac4e-cc202231eed7 [ 1011.359468] env[61273]: INFO oslo_messaging._drivers.amqpdriver [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Expecting reply to msg 00119004aa8048e99f409612a8932159 in queue reply_1a10a9f2080b4c739845c67ef4f4f9e6 [ 1011.386766] env[61273]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00119004aa8048e99f409612a8932159 [ 1011.865831] env[61273]: DEBUG oslo_concurrency.lockutils [None req-bdedfe07-2453-4e3d-8f01-5d69cf9903a4 tempest-ServersTestJSON-462811407 tempest-ServersTestJSON-462811407-project-member] Lock "74f79e89-3508-474f-ac4e-cc202231eed7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
12.953s {{(pid=61273) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}